From a11da022fc8f8fe7c72aedb92cfdb7e1e38e985a Mon Sep 17 00:00:00 2001 From: chenheng Date: Wed, 20 Jan 2016 10:33:24 +0800 Subject: [PATCH] HBASE-15128 Disable region splits and merges in HBCK --- .../java/org/apache/hadoop/hbase/client/Admin.java | 30 +- .../hbase/client/ConnectionImplementation.java | 12 + .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 32 + .../hadoop/hbase/protobuf/RequestConverter.java | 47 + .../hadoop/hbase/zookeeper/ZooKeeperWatcher.java | 10 + .../hbase/protobuf/generated/MasterProtos.java | 4302 +++++++++++++++----- .../hbase/protobuf/generated/SnapshotProtos.java | 500 +-- .../hbase/protobuf/generated/SwitchProtos.java | 485 +++ hbase-protocol/src/main/protobuf/Master.proto | 36 + hbase-protocol/src/main/protobuf/Switch.proto | 29 + .../hadoop/hbase/master/AssignmentManager.java | 9 + .../org/apache/hadoop/hbase/master/HMaster.java | 29 + .../hadoop/hbase/master/MasterRpcServices.java | 42 + .../org/apache/hadoop/hbase/util/HBaseFsck.java | 35 + .../hbase/zookeeper/SwitchTrackerManager.java | 150 + .../hadoop/hbase/client/TestSwitchStatus.java | 189 + hbase-shell/src/main/ruby/hbase/admin.rb | 32 + hbase-shell/src/main/ruby/shell.rb | 2 + .../src/main/ruby/shell/commands/get_switch.rb | 41 + .../src/main/ruby/shell/commands/set_switch.rb | 42 + 20 files changed, 4857 insertions(+), 1197 deletions(-) create mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SwitchProtos.java create mode 100644 hbase-protocol/src/main/protobuf/Switch.proto create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SwitchTrackerManager.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSwitchStatus.java create mode 100644 hbase-shell/src/main/ruby/shell/commands/get_switch.rb create mode 100644 hbase-shell/src/main/ruby/shell/commands/set_switch.rb diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index d7b52d5..04a0d66 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -1678,11 +1678,28 @@ public interface Admin extends Abortable, Closeable { List<SecurityCapability> getSecurityCapabilities() throws IOException; /** + * Turn the switches on or off. + * + * @param enabled true to enable the given switches, false to disable them + * @param synchronous if true, wait until any outstanding split() call has returned before flipping the switch + * @param switchTypes the switch types to set + * @return the previous value of each switch, in the same order as {@code switchTypes} + */ + boolean[] setSwitches(final boolean enabled, final boolean synchronous, + final SwitchType... switchTypes) throws IOException; + + /** + * Query the current state of a switch. + * + * @return true if the switch is enabled, false otherwise. + */ + boolean isSwitchEnabled(final SwitchType switchType) throws IOException; + + /** + * Currently, there are only two compact types: * {@code NORMAL} means do store files compaction; * {@code MOB} means do mob files compaction. * */ - @InterfaceAudience.Public @InterfaceStability.Unstable public enum CompactType { @@ -1692,4 +1709,15 @@ public interface Admin extends Abortable, Closeable { CompactType(int value) {} } + + /** + * TODO: add the two remaining switch types: Balancer and Region Normalizer + * */ + @InterfaceAudience.Public + @InterfaceStability.Unstable + public enum SwitchType { + SPLIT, + MERGE + } + }
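A minimal client-side sketch of the two Admin calls added above, assuming this patch is applied; the class name, configuration setup, and try/finally wiring below are illustrative only and not part of the patch:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SwitchUsageExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Disable splits and merges synchronously, remembering the previous
      // state of each switch; prev[i] corresponds to the i-th SwitchType
      // passed in (see the unpacking loop in HBaseAdmin below).
      boolean[] prev = admin.setSwitches(false, true,
          Admin.SwitchType.SPLIT, Admin.SwitchType.MERGE);
      try {
        // ... run repair work (e.g. an HBCK pass) while splits/merges are off ...
        System.out.println("split switch enabled: "
            + admin.isSwitchEnabled(Admin.SwitchType.SPLIT));
      } finally {
        // Restore each switch to its previous value.
        admin.setSwitches(prev[0], true, Admin.SwitchType.SPLIT);
        admin.setSwitches(prev[1], true, Admin.SwitchType.MERGE);
      }
    }
  }
}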
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index d730287..5673a13 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -1787,6 +1787,18 @@ class ConnectionImplementation implements ClusterConnection, Closeable { } @Override + public MasterProtos.SetSwitchesResponse setSwitches(RpcController controller, + MasterProtos.SetSwitchesRequest request) throws ServiceException { + return stub.setSwitches(controller, request); + } + + @Override + public MasterProtos.IsSwitchEnabledResponse isSwitchEnabled(RpcController controller, + MasterProtos.IsSwitchEnabledRequest request) throws ServiceException { + return stub.isSwitchEnabled(controller, request); + } + + @Override public IsNormalizerEnabledResponse isNormalizerEnabled(RpcController controller, IsNormalizerEnabledRequest request) throws ServiceException { return stub.isNormalizerEnabled(controller, request); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index db94ff4..1bd2c73 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -89,6 +89,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescripti import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; @@ -3378,6 +3379,37 @@ public class HBaseAdmin implements Admin { } } + @Override + public boolean[] setSwitches(final boolean enabled, final boolean synchronous, + final SwitchType... switchTypes) + throws IOException { + return executeCallable(new MasterCallable<boolean[]>(getConnection()) { + @Override + public boolean[] call(int callTimeout) throws ServiceException { + MasterProtos.SetSwitchesResponse response = master.setSwitches(null, + RequestConverter.buildSetSwitchesRequest(enabled, synchronous, + switchTypes)); + boolean[] result = new boolean[switchTypes.length]; + int i = 0; + for (Boolean prevValue : response.getPrevValueList()) { + result[i++] = prevValue; + } + return result; + } + }); + } + + @Override + public boolean isSwitchEnabled(final SwitchType switchType) throws IOException { + return executeCallable(new MasterCallable<Boolean>(getConnection()) { + @Override + public Boolean call(int callTimeout) throws ServiceException { + return master.isSwitchEnabled(null, + RequestConverter.buildIsSwitchEnabledRequest(switchType)).getEnabled(); + } + }); + } + private HRegionInfo getMobRegionInfo(TableName tableName) { return new HRegionInfo(tableName, Bytes.toBytes(".mob"), HConstants.EMPTY_END_ROW, false, 0);
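The boolean[] that setSwitches returns lines up with the SwitchType varargs in argument order, as the unpacking loop above shows. A small helper (illustrative only, not part of the patch) that makes this contract explicit by keying the previous values by switch type:

import java.io.IOException;
import java.util.EnumMap;
import java.util.Map;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Admin.SwitchType;

public final class SwitchStates {
  private SwitchStates() {}

  // Flips the given switches and returns each switch's previous state,
  // keyed by type instead of by position in the varargs.
  public static Map<SwitchType, Boolean> setAndRemember(Admin admin, boolean enabled,
      boolean synchronous, SwitchType... types) throws IOException {
    boolean[] prev = admin.setSwitches(enabled, synchronous, types);
    Map<SwitchType, Boolean> previousByType = new EnumMap<SwitchType, Boolean>(SwitchType.class);
    for (int i = 0; i < types.length; i++) {
      previousByType.put(types[i], prev[i]);
    }
    return previousByType;
  }
}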
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index 9d659fc..05f594b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Action; +import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -76,6 +77,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest; @@ -95,6 +97,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabled import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; @@ -103,6 +106,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest; @@ -1684,4 +1688,47 @@ public final class RequestConverter { public static SetNormalizerRunningRequest buildSetNormalizerRunningRequest(boolean on) { return SetNormalizerRunningRequest.newBuilder().setOn(on).build(); } + + /** + * Creates a protocol buffer IsSwitchEnabledRequest. + * + * @param switchType see {@link org.apache.hadoop.hbase.client.Admin.SwitchType} + * @return an IsSwitchEnabledRequest + */ + public static IsSwitchEnabledRequest buildIsSwitchEnabledRequest(Admin.SwitchType switchType) { + IsSwitchEnabledRequest.Builder builder = IsSwitchEnabledRequest.newBuilder(); + builder.setSwitchType(convert(switchType)); + return builder.build(); + } + + /** + * Creates a protocol buffer SetSwitchesRequest. + * + * @param enabled whether the switches should be enabled + * @param synchronous whether to set the switches synchronously + * @param switchTypes the switch types to set; see {@link org.apache.hadoop.hbase.client.Admin.SwitchType} + * @return a SetSwitchesRequest + */ + public static SetSwitchesRequest buildSetSwitchesRequest(boolean enabled, boolean synchronous, + Admin.SwitchType... switchTypes) { + SetSwitchesRequest.Builder builder = SetSwitchesRequest.newBuilder(); + builder.setEnabled(enabled); + builder.setSynchronous(synchronous); + for (Admin.SwitchType switchType : switchTypes) { + builder.addSwitchTypes(convert(switchType)); + } + return builder.build(); + } + + private static MasterProtos.MasterSwitchType convert(Admin.SwitchType switchType) { + switch (switchType) { + case SPLIT: + return MasterProtos.MasterSwitchType.SPLIT; + case MERGE: + return MasterProtos.MasterSwitchType.MERGE; + default: + break; + } + throw new UnsupportedOperationException("Unsupported switch type: " + switchType); + } }
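For reference, a sketch of the request shape the new converter methods produce, assuming this patch is applied (enabled is a required bool, synchronous an optional bool, and switch_types a repeated enum; see the Master.proto changes in this patch). The class below is illustrative only:

import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest;

public class ConverterExample {
  public static void main(String[] args) {
    SetSwitchesRequest req = RequestConverter.buildSetSwitchesRequest(
        false, true, Admin.SwitchType.SPLIT, Admin.SwitchType.MERGE);
    System.out.println(req.getEnabled());          // false
    System.out.println(req.getSynchronous());      // true
    System.out.println(req.getSwitchTypesCount()); // 2, in argument order
  }
}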
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java index f7a2175..a295754 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java @@ -113,6 +113,8 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable { public String balancerZNode; // znode containing the state of region normalizer private String regionNormalizerZNode; + // znode containing the state of all switches; currently it has split and merge child nodes.
+ private String switchZNode; // znode containing the lock for the tables public String tableLockZNode; // znode containing the state of recovering regions @@ -382,6 +384,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable { conf.get("zookeeper.znode.balancer", "balancer")); regionNormalizerZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.regionNormalizer", "normalizer")); + switchZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.switch", "switch")); tableLockZNode = ZKUtil.joinZNode(baseZNode, conf.get("zookeeper.znode.tableLock", "table-lock")); recoveringRegionsZNode = ZKUtil.joinZNode(baseZNode, @@ -741,4 +744,11 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable { public String getRegionNormalizerZNode() { return regionNormalizerZNode; } + + /** + * @return ZK node for switch + * */ + public String getSwitchZNode() { + return switchZNode; + } } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index 043d549..5018df0 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -8,6 +8,88 @@ public final class MasterProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + /** + * Protobuf enum {@code hbase.pb.MasterSwitchType} + */ + public enum MasterSwitchType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SPLIT = 0; + */ + SPLIT(0, 0), + /** + * MERGE = 1; + */ + MERGE(1, 1), + ; + + /** + * SPLIT = 0; + */ + public static final int SPLIT_VALUE = 0; + /** + * MERGE = 1; + */ + public static final int MERGE_VALUE = 1; + + + public final int getNumber() { return value; } + + public static MasterSwitchType valueOf(int value) { + switch (value) { + case 0: return SPLIT; + case 1: return MERGE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MasterSwitchType findValueByNumber(int number) { + return MasterSwitchType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final MasterSwitchType[] VALUES = values(); + + public static MasterSwitchType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private MasterSwitchType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType) + } + public interface AddColumnRequestOrBuilder extends 
com.google.protobuf.MessageOrBuilder { @@ -28764,28 +28846,62 @@ public final class MasterProtos { // @@protoc_insertion_point(class_scope:hbase.pb.IsBalancerEnabledResponse) } - public interface NormalizeRequestOrBuilder + public interface SetSwitchesRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { + + // required bool enabled = 1; + /** + * required bool enabled = 1; + */ + boolean hasEnabled(); + /** + * required bool enabled = 1; + */ + boolean getEnabled(); + + // optional bool synchronous = 2; + /** + * optional bool synchronous = 2; + */ + boolean hasSynchronous(); + /** + * optional bool synchronous = 2; + */ + boolean getSynchronous(); + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + java.util.List getSwitchTypesList(); + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + int getSwitchTypesCount(); + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index); } /** - * Protobuf type {@code hbase.pb.NormalizeRequest} + * Protobuf type {@code hbase.pb.SetSwitchesRequest} */ - public static final class NormalizeRequest extends + public static final class SetSwitchesRequest extends com.google.protobuf.GeneratedMessage - implements NormalizeRequestOrBuilder { - // Use NormalizeRequest.newBuilder() to construct. - private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SetSwitchesRequestOrBuilder { + // Use SetSwitchesRequest.newBuilder() to construct. + private SetSwitchesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SetSwitchesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final NormalizeRequest defaultInstance; - public static NormalizeRequest getDefaultInstance() { + private static final SetSwitchesRequest defaultInstance; + public static SetSwitchesRequest getDefaultInstance() { return defaultInstance; } - public NormalizeRequest getDefaultInstanceForType() { + public SetSwitchesRequest getDefaultInstanceForType() { return defaultInstance; } @@ -28795,11 +28911,12 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private NormalizeRequest( + private SetSwitchesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); + int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -28817,6 +28934,49 @@ public final class MasterProtos { } break; } + case 8: { + bitField0_ |= 0x00000001; + enabled_ = input.readBool(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + synchronous_ = input.readBool(); + break; + } + case 24: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + 
switchTypes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + switchTypes_.add(value); + } + break; + } + case 26: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + switchTypes_.add(value); + } + } + input.popLimit(oldLimit); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -28825,44 +28985,109 @@ public final class MasterProtos { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = java.util.Collections.unmodifiableList(switchTypes_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetSwitchesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeRequest(input, extensionRegistry); + return new SetSwitchesRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } + private int bitField0_; + // required bool enabled = 1; + public static final int ENABLED_FIELD_NUMBER = 1; + private boolean enabled_; + /** + * required bool enabled = 1; + */ + public boolean hasEnabled() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool enabled = 1; + */ + public boolean getEnabled() { + return enabled_; + } + + // optional bool synchronous = 2; + public static final int SYNCHRONOUS_FIELD_NUMBER = 2; + private boolean synchronous_; + /** + * optional bool synchronous = 2; + */ + 
public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool synchronous = 2; + */ + public boolean getSynchronous() { + return synchronous_; + } + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + public static final int SWITCH_TYPES_FIELD_NUMBER = 3; + private java.util.List switchTypes_; + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public java.util.List getSwitchTypesList() { + return switchTypes_; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public int getSwitchTypesCount() { + return switchTypes_.size(); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { + return switchTypes_.get(index); + } + private void initFields() { + enabled_ = false; + synchronous_ = false; + switchTypes_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasEnabled()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -28870,6 +29095,15 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, enabled_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, synchronous_); + } + for (int i = 0; i < switchTypes_.size(); i++) { + output.writeEnum(3, switchTypes_.get(i).getNumber()); + } getUnknownFields().writeTo(output); } @@ -28879,6 +29113,23 @@ public final class MasterProtos { if (size != -1) return size; size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, enabled_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, synchronous_); + } + { + int dataSize = 0; + for (int i = 0; i < switchTypes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(switchTypes_.get(i).getNumber()); + } + size += dataSize; + size += 1 * switchTypes_.size(); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -28896,12 +29147,24 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest) obj; boolean result = true; + result = result && (hasEnabled() == other.hasEnabled()); + if (hasEnabled()) { + result = result && (getEnabled() + == other.getEnabled()); + } + result = result && (hasSynchronous() == other.hasSynchronous()); + if (hasSynchronous()) { + result = result && (getSynchronous() + == other.getSynchronous()); + } + result = result && getSwitchTypesList() + 
.equals(other.getSwitchTypesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -28915,58 +29178,70 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEnabled()) { + hash = (37 * hash) + ENABLED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getEnabled()); + } + if (hasSynchronous()) { + hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getSynchronous()); + } + if (getSwitchTypesCount() > 0) { + hash = (37 * hash) + SWITCH_TYPES_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getSwitchTypesList()); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -28975,7 +29250,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -28987,24 +29262,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.NormalizeRequest} + * Protobuf type {@code hbase.pb.SetSwitchesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29024,6 +29299,12 @@ public final class MasterProtos { public Builder clear() { super.clear(); + enabled_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + synchronous_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + switchTypes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -29033,43 +29314,79 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.enabled_ = enabled_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.synchronous_ = synchronous_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = java.util.Collections.unmodifiableList(switchTypes_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.switchTypes_ = switchTypes_; + result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.getDefaultInstance()) return this; + if (other.hasEnabled()) { + setEnabled(other.getEnabled()); + } + if (other.hasSynchronous()) { + setSynchronous(other.getSynchronous()); + } + if (!other.switchTypes_.isEmpty()) { + if (switchTypes_.isEmpty()) { + switchTypes_ = other.switchTypes_; + bitField0_ = (bitField0_ & 
~0x00000004); + } else { + ensureSwitchTypesIsMutable(); + switchTypes_.addAll(other.switchTypes_); + } + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasEnabled()) { + + return false; + } return true; } @@ -29077,11 +29394,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29090,50 +29407,193 @@ public final class MasterProtos { } return this; } + private int bitField0_; - // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeRequest) + // required bool enabled = 1; + private boolean enabled_ ; + /** + * required bool enabled = 1; + */ + public boolean hasEnabled() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool enabled = 1; + */ + public boolean getEnabled() { + return enabled_; + } + /** + * required bool enabled = 1; + */ + public Builder setEnabled(boolean value) { + bitField0_ |= 0x00000001; + enabled_ = value; + onChanged(); + return this; + } + /** + * required bool enabled = 1; + */ + public Builder clearEnabled() { + bitField0_ = (bitField0_ & ~0x00000001); + enabled_ = false; + onChanged(); + return this; + } + + // optional bool synchronous = 2; + private boolean synchronous_ ; + /** + * optional bool synchronous = 2; + */ + public boolean hasSynchronous() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool synchronous = 2; + */ + public boolean getSynchronous() { + return synchronous_; + } + /** + * optional bool synchronous = 2; + */ + public Builder setSynchronous(boolean value) { + bitField0_ |= 0x00000002; + synchronous_ = value; + onChanged(); + return this; + } + /** + * optional bool synchronous = 2; + */ + public Builder clearSynchronous() { + bitField0_ = (bitField0_ & ~0x00000002); + synchronous_ = false; + onChanged(); + return this; + } + + // repeated .hbase.pb.MasterSwitchType switch_types = 3; + private java.util.List switchTypes_ = + java.util.Collections.emptyList(); + private void ensureSwitchTypesIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + switchTypes_ = new java.util.ArrayList(switchTypes_); + bitField0_ |= 0x00000004; + } + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public java.util.List getSwitchTypesList() { + return java.util.Collections.unmodifiableList(switchTypes_); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public int getSwitchTypesCount() { + return switchTypes_.size(); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchTypes(int index) { + return switchTypes_.get(index); + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder 
setSwitchTypes( + int index, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSwitchTypesIsMutable(); + switchTypes_.set(index, value); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder addSwitchTypes(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSwitchTypesIsMutable(); + switchTypes_.add(value); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder addAllSwitchTypes( + java.lang.Iterable values) { + ensureSwitchTypesIsMutable(); + super.addAll(values, switchTypes_); + onChanged(); + return this; + } + /** + * repeated .hbase.pb.MasterSwitchType switch_types = 3; + */ + public Builder clearSwitchTypes() { + switchTypes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSwitchesRequest) } static { - defaultInstance = new NormalizeRequest(true); + defaultInstance = new SetSwitchesRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.SetSwitchesRequest) } - public interface NormalizeResponseOrBuilder + public interface SetSwitchesResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool normalizer_ran = 1; + // repeated bool prev_value = 1; /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - boolean hasNormalizerRan(); + java.util.List getPrevValueList(); /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - boolean getNormalizerRan(); + int getPrevValueCount(); + /** + * repeated bool prev_value = 1; + */ + boolean getPrevValue(int index); } /** - * Protobuf type {@code hbase.pb.NormalizeResponse} + * Protobuf type {@code hbase.pb.SetSwitchesResponse} */ - public static final class NormalizeResponse extends + public static final class SetSwitchesResponse extends com.google.protobuf.GeneratedMessage - implements NormalizeResponseOrBuilder { - // Use NormalizeResponse.newBuilder() to construct. - private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SetSwitchesResponseOrBuilder { + // Use SetSwitchesResponse.newBuilder() to construct. 
+ private SetSwitchesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SetSwitchesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final NormalizeResponse defaultInstance; - public static NormalizeResponse getDefaultInstance() { + private static final SetSwitchesResponse defaultInstance; + public static SetSwitchesResponse getDefaultInstance() { return defaultInstance; } - public NormalizeResponse getDefaultInstanceForType() { + public SetSwitchesResponse getDefaultInstanceForType() { return defaultInstance; } @@ -29143,7 +29603,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private NormalizeResponse( + private SetSwitchesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -29167,8 +29627,24 @@ public final class MasterProtos { break; } case 8: { - bitField0_ |= 0x00000001; - normalizerRan_ = input.readBool(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + prevValue_.add(input.readBool()); + break; + } + case 10: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { + prevValue_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + while (input.getBytesUntilLimit() > 0) { + prevValue_.add(input.readBool()); + } + input.popLimit(limit); break; } } @@ -29179,66 +29655,71 @@ public final class MasterProtos { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = java.util.Collections.unmodifiableList(prevValue_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public NormalizeResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { 
+ public SetSwitchesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new NormalizeResponse(input, extensionRegistry); + return new SetSwitchesResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } - private int bitField0_; - // required bool normalizer_ran = 1; - public static final int NORMALIZER_RAN_FIELD_NUMBER = 1; - private boolean normalizerRan_; + // repeated bool prev_value = 1; + public static final int PREV_VALUE_FIELD_NUMBER = 1; + private java.util.List prevValue_; /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean hasNormalizerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public java.util.List + getPrevValueList() { + return prevValue_; } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean getNormalizerRan() { - return normalizerRan_; + public int getPrevValueCount() { + return prevValue_.size(); + } + /** + * repeated bool prev_value = 1; + */ + public boolean getPrevValue(int index) { + return prevValue_.get(index); } private void initFields() { - normalizerRan_ = false; + prevValue_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasNormalizerRan()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -29246,8 +29727,8 @@ public final class MasterProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, normalizerRan_); + for (int i = 0; i < prevValue_.size(); i++) { + output.writeBool(1, prevValue_.get(i)); } getUnknownFields().writeTo(output); } @@ -29258,9 +29739,11 @@ public final class MasterProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, normalizerRan_); + { + int dataSize = 0; + dataSize = 1 * getPrevValueList().size(); + size += dataSize; + size += 1 * getPrevValueList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -29279,17 +29762,14 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse) obj; boolean result = true; - result = result && (hasNormalizerRan() == other.hasNormalizerRan()); - if (hasNormalizerRan()) { - result = result && (getNormalizerRan() - == other.getNormalizerRan()); - } + result = result && getPrevValueList() + .equals(other.getPrevValueList()); result = result && 
getUnknownFields().equals(other.getUnknownFields()); return result; @@ -29303,62 +29783,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasNormalizerRan()) { - hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getNormalizerRan()); + if (getPrevValueCount() > 0) { + hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getPrevValueList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse 
parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -29367,7 +29847,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -29379,24 +29859,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.NormalizeResponse} + * Protobuf type {@code hbase.pb.SetSwitchesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29416,7 +29896,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - normalizerRan_ = false; + prevValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -29427,57 +29907,59 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetSwitchesResponse_descriptor; } - public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse(this); int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = java.util.Collections.unmodifiableList(prevValue_); + bitField0_ = (bitField0_ & ~0x00000001); } - result.normalizerRan_ = normalizerRan_; - result.bitField0_ = to_bitField0_; + result.prevValue_ = prevValue_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()) return this; - if (other.hasNormalizerRan()) { - setNormalizerRan(other.getNormalizerRan()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance()) return this; + if (!other.prevValue_.isEmpty()) { + if (prevValue_.isEmpty()) { + prevValue_ = other.prevValue_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePrevValueIsMutable(); + prevValue_.addAll(other.prevValue_); + } + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasNormalizerRan()) { - - return false; - } return true; } @@ -29485,11 +29967,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29500,82 +29982,115 @@ public final class MasterProtos { } private int bitField0_; - // required bool normalizer_ran = 1; - private boolean normalizerRan_ ; + // repeated bool prev_value = 1; + private java.util.List prevValue_ = java.util.Collections.emptyList(); + private void ensurePrevValueIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + prevValue_ = new java.util.ArrayList(prevValue_); + bitField0_ |= 0x00000001; + } + } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean hasNormalizerRan() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public java.util.List + getPrevValueList() { + return java.util.Collections.unmodifiableList(prevValue_); } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public boolean getNormalizerRan() { - return normalizerRan_; + public int getPrevValueCount() { + return prevValue_.size(); } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public Builder setNormalizerRan(boolean value) { - bitField0_ |= 0x00000001; - normalizerRan_ = value; + public boolean getPrevValue(int index) { + return prevValue_.get(index); + } + /** + * repeated bool prev_value = 1; + */ + public Builder setPrevValue( + int index, boolean value) { + ensurePrevValueIsMutable(); + prevValue_.set(index, value); onChanged(); return this; } /** - * required bool normalizer_ran = 1; + * repeated bool prev_value = 1; */ - public Builder clearNormalizerRan() { + public Builder addPrevValue(boolean value) { + ensurePrevValueIsMutable(); + prevValue_.add(value); + onChanged(); + return this; + } + /** + * repeated bool prev_value = 1; + */ + public Builder addAllPrevValue( + java.lang.Iterable values) { + ensurePrevValueIsMutable(); + super.addAll(values, prevValue_); + onChanged(); + return this; + } + /** + * repeated bool prev_value = 1; + */ + public Builder clearPrevValue() { + prevValue_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); - normalizerRan_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.SetSwitchesResponse) } static { - defaultInstance = new NormalizeResponse(true); + defaultInstance = new SetSwitchesResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.SetSwitchesResponse) } - public interface SetNormalizerRunningRequestOrBuilder + public interface IsSwitchEnabledRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool on = 1; + // required .hbase.pb.MasterSwitchType switch_type = 1; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - boolean hasOn(); + 
boolean hasSwitchType(); /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - boolean getOn(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType(); } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + * Protobuf type {@code hbase.pb.IsSwitchEnabledRequest} */ - public static final class SetNormalizerRunningRequest extends + public static final class IsSwitchEnabledRequest extends com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningRequestOrBuilder { - // Use SetNormalizerRunningRequest.newBuilder() to construct. - private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements IsSwitchEnabledRequestOrBuilder { + // Use IsSwitchEnabledRequest.newBuilder() to construct. + private IsSwitchEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private IsSwitchEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final SetNormalizerRunningRequest defaultInstance; - public static SetNormalizerRunningRequest getDefaultInstance() { + private static final IsSwitchEnabledRequest defaultInstance; + public static IsSwitchEnabledRequest getDefaultInstance() { return defaultInstance; } - public SetNormalizerRunningRequest getDefaultInstanceForType() { + public IsSwitchEnabledRequest getDefaultInstanceForType() { return defaultInstance; } @@ -29585,7 +30100,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SetNormalizerRunningRequest( + private IsSwitchEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -29609,8 +30124,14 @@ public final class MasterProtos { break; } case 8: { - bitField0_ |= 0x00000001; - on_ = input.readBool(); + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + switchType_ = value; + } break; } } @@ -29627,57 +30148,57 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSwitchEnabledRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new SetNormalizerRunningRequest(input, extensionRegistry); + return new IsSwitchEnabledRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // required bool on = 1; - public static final int ON_FIELD_NUMBER = 1; - private boolean on_; + // required .hbase.pb.MasterSwitchType switch_type = 1; + public static final int SWITCH_TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType switchType_; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean hasOn() { + public boolean hasSwitchType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean getOn() { - return on_; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { + return switchType_; } private void initFields() { - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasOn()) { + if (!hasSwitchType()) { memoizedIsInitialized = 0; return false; } @@ -29689,7 +30210,7 @@ public final class MasterProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, on_); + output.writeEnum(1, switchType_.getNumber()); } getUnknownFields().writeTo(output); } @@ -29702,7 +30223,7 @@ public final class MasterProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, on_); + .computeEnumSize(1, switchType_.getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -29721,16 +30242,16 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest) obj; boolean result = true; - result = result && (hasOn() == other.hasOn()); - if (hasOn()) { - result = result && (getOn() - == 
other.getOn()); + result = result && (hasSwitchType() == other.hasSwitchType()); + if (hasSwitchType()) { + result = result && + (getSwitchType() == other.getSwitchType()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -29745,62 +30266,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasOn()) { - hash = (37 * hash) + ON_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getOn()); + if (hasSwitchType()) { + hash = (37 * hash) + SWITCH_TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getSwitchType()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -29809,7 +30330,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -29821,24 +30342,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + * Protobuf type {@code hbase.pb.IsSwitchEnabledRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -29858,7 +30379,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ 
-29869,54 +30390,54 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.on_ = on_; + result.switchType_ = switchType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance()) return this; - if (other.hasOn()) { - setOn(other.getOn()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.getDefaultInstance()) return this; + if (other.hasSwitchType()) { + setSwitchType(other.getSwitchType()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasOn()) { + if (!hasSwitchType()) { return false; } @@ 
-29927,11 +30448,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -29942,82 +30463,85 @@ public final class MasterProtos { } private int bitField0_; - // required bool on = 1; - private boolean on_ ; + // required .hbase.pb.MasterSwitchType switch_type = 1; + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean hasOn() { + public boolean hasSwitchType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public boolean getOn() { - return on_; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType getSwitchType() { + return switchType_; } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public Builder setOn(boolean value) { + public Builder setSwitchType(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType value) { + if (value == null) { + throw new NullPointerException(); + } bitField0_ |= 0x00000001; - on_ = value; + switchType_ = value; onChanged(); return this; } /** - * required bool on = 1; + * required .hbase.pb.MasterSwitchType switch_type = 1; */ - public Builder clearOn() { + public Builder clearSwitchType() { bitField0_ = (bitField0_ & ~0x00000001); - on_ = false; + switchType_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterSwitchType.SPLIT; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningRequest) + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSwitchEnabledRequest) } static { - defaultInstance = new SetNormalizerRunningRequest(true); + defaultInstance = new IsSwitchEnabledRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.IsSwitchEnabledRequest) } - public interface SetNormalizerRunningResponseOrBuilder + public interface IsSwitchEnabledResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional bool prev_normalizer_value = 1; + // required bool enabled = 1; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - boolean hasPrevNormalizerValue(); + boolean hasEnabled(); /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - boolean getPrevNormalizerValue(); + boolean getEnabled(); } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + * Protobuf type {@code hbase.pb.IsSwitchEnabledResponse} */ - public static 
final class SetNormalizerRunningResponse extends + public static final class IsSwitchEnabledResponse extends com.google.protobuf.GeneratedMessage - implements SetNormalizerRunningResponseOrBuilder { - // Use SetNormalizerRunningResponse.newBuilder() to construct. - private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements IsSwitchEnabledResponseOrBuilder { + // Use IsSwitchEnabledResponse.newBuilder() to construct. + private IsSwitchEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private IsSwitchEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final SetNormalizerRunningResponse defaultInstance; - public static SetNormalizerRunningResponse getDefaultInstance() { + private static final IsSwitchEnabledResponse defaultInstance; + public static IsSwitchEnabledResponse getDefaultInstance() { return defaultInstance; } - public SetNormalizerRunningResponse getDefaultInstanceForType() { + public IsSwitchEnabledResponse getDefaultInstanceForType() { return defaultInstance; } @@ -30027,7 +30551,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SetNormalizerRunningResponse( + private IsSwitchEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -30052,7 +30576,7 @@ public final class MasterProtos { } case 8: { bitField0_ |= 0x00000001; - prevNormalizerValue_ = input.readBool(); + enabled_ = input.readBool(); break; } } @@ -30069,56 +30593,60 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SetNormalizerRunningResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public IsSwitchEnabledResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new 
SetNormalizerRunningResponse(input, extensionRegistry); + return new IsSwitchEnabledResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // optional bool prev_normalizer_value = 1; - public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1; - private boolean prevNormalizerValue_; + // required bool enabled = 1; + public static final int ENABLED_FIELD_NUMBER = 1; + private boolean enabled_; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean hasPrevNormalizerValue() { + public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean getPrevNormalizerValue() { - return prevNormalizerValue_; + public boolean getEnabled() { + return enabled_; } private void initFields() { - prevNormalizerValue_ = false; + enabled_ = false; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasEnabled()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -30127,7 +30655,7 @@ public final class MasterProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, prevNormalizerValue_); + output.writeBool(1, enabled_); } getUnknownFields().writeTo(output); } @@ -30140,7 +30668,7 @@ public final class MasterProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, prevNormalizerValue_); + .computeBoolSize(1, enabled_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -30159,16 +30687,16 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse) obj; boolean result = true; - result = result && (hasPrevNormalizerValue() == other.hasPrevNormalizerValue()); - if (hasPrevNormalizerValue()) { - result = result && (getPrevNormalizerValue() - == other.getPrevNormalizerValue()); + result = result && (hasEnabled() == other.hasEnabled()); + if (hasEnabled()) { + result = result && (getEnabled() + == other.getEnabled()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -30183,62 +30711,62 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPrevNormalizerValue()) { - hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getPrevNormalizerValue()); + if (hasEnabled()) { + hash = (37 * hash) + ENABLED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getEnabled()); } hash = (29 * hash) + 
getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -30247,7 +30775,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -30259,24 +30787,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + * Protobuf type {@code hbase.pb.IsSwitchEnabledResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -30296,7 +30824,7 @@ public final class MasterProtos { public Builder clear() { super.clear(); - prevNormalizerValue_ = false; + enabled_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -30307,53 +30835,57 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(); + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.prevNormalizerValue_ = prevNormalizerValue_; + result.enabled_ = enabled_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()) return this; - if (other.hasPrevNormalizerValue()) { - setPrevNormalizerValue(other.getPrevNormalizerValue()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance()) return this; + if (other.hasEnabled()) { + setEnabled(other.getEnabled()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasEnabled()) { + + return false; + } return true; } @@ -30361,11 +30893,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - 
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -30376,72 +30908,72 @@ public final class MasterProtos { } private int bitField0_; - // optional bool prev_normalizer_value = 1; - private boolean prevNormalizerValue_ ; + // required bool enabled = 1; + private boolean enabled_ ; /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean hasPrevNormalizerValue() { + public boolean hasEnabled() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public boolean getPrevNormalizerValue() { - return prevNormalizerValue_; + public boolean getEnabled() { + return enabled_; } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public Builder setPrevNormalizerValue(boolean value) { + public Builder setEnabled(boolean value) { bitField0_ |= 0x00000001; - prevNormalizerValue_ = value; + enabled_ = value; onChanged(); return this; } /** - * optional bool prev_normalizer_value = 1; + * required bool enabled = 1; */ - public Builder clearPrevNormalizerValue() { + public Builder clearEnabled() { bitField0_ = (bitField0_ & ~0x00000001); - prevNormalizerValue_ = false; + enabled_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.IsSwitchEnabledResponse) } static { - defaultInstance = new SetNormalizerRunningResponse(true); + defaultInstance = new IsSwitchEnabledResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.IsSwitchEnabledResponse) } - public interface IsNormalizerEnabledRequestOrBuilder + public interface NormalizeRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + * Protobuf type {@code hbase.pb.NormalizeRequest} */ - public static final class IsNormalizerEnabledRequest extends + public static final class NormalizeRequest extends com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledRequestOrBuilder { - // Use IsNormalizerEnabledRequest.newBuilder() to construct. - private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements NormalizeRequestOrBuilder { + // Use NormalizeRequest.newBuilder() to construct. 
+ private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final IsNormalizerEnabledRequest defaultInstance; - public static IsNormalizerEnabledRequest getDefaultInstance() { + private static final NormalizeRequest defaultInstance; + public static NormalizeRequest getDefaultInstance() { return defaultInstance; } - public IsNormalizerEnabledRequest getDefaultInstanceForType() { + public NormalizeRequest getDefaultInstanceForType() { return defaultInstance; } @@ -30451,7 +30983,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private IsNormalizerEnabledRequest( + private NormalizeRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -30487,28 +31019,28 @@ public final class MasterProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IsNormalizerEnabledRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NormalizeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new IsNormalizerEnabledRequest(input, extensionRegistry); + return new NormalizeRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -30552,10 +31084,10 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) obj; boolean result = true; result = result && @@ -30576,53 +31108,53 @@ public final class MasterProtos { return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -30631,7 +31163,7 @@ public final class MasterProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -30643,24 +31175,24 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + * Protobuf type {@code hbase.pb.NormalizeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -30689,38 +31221,38 @@ public final class MasterProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest 
getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -30733,11 +31265,11 @@ public final class MasterProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -30747,49 +31279,49 @@ public final class MasterProtos { return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledRequest) + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeRequest) } static { - defaultInstance = new IsNormalizerEnabledRequest(true); + defaultInstance = new NormalizeRequest(true); defaultInstance.initFields(); } - // 
@@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest) + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeRequest) } - public interface IsNormalizerEnabledResponseOrBuilder + public interface NormalizeResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required bool enabled = 1; + // required bool normalizer_ran = 1; /** - * required bool enabled = 1; + * required bool normalizer_ran = 1; */ - boolean hasEnabled(); + boolean hasNormalizerRan(); /** - * required bool enabled = 1; + * required bool normalizer_ran = 1; */ - boolean getEnabled(); + boolean getNormalizerRan(); } /** - * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse} + * Protobuf type {@code hbase.pb.NormalizeResponse} */ - public static final class IsNormalizerEnabledResponse extends + public static final class NormalizeResponse extends com.google.protobuf.GeneratedMessage - implements IsNormalizerEnabledResponseOrBuilder { - // Use IsNormalizerEnabledResponse.newBuilder() to construct. - private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements NormalizeResponseOrBuilder { + // Use NormalizeResponse.newBuilder() to construct. + private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final IsNormalizerEnabledResponse defaultInstance; - public static IsNormalizerEnabledResponse getDefaultInstance() { + private static final NormalizeResponse defaultInstance; + public static NormalizeResponse getDefaultInstance() { return defaultInstance; } - public IsNormalizerEnabledResponse getDefaultInstanceForType() { + public NormalizeResponse getDefaultInstanceForType() { return defaultInstance; } @@ -30799,7 +31331,1663 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private IsNormalizerEnabledResponse( + private NormalizeResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + normalizerRan_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public NormalizeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NormalizeResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool normalizer_ran = 1; + public static final int NORMALIZER_RAN_FIELD_NUMBER = 1; + private boolean normalizerRan_; + /** + * required bool normalizer_ran = 1; + */ + public boolean hasNormalizerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool normalizer_ran = 1; + */ + public boolean getNormalizerRan() { + return normalizerRan_; + } + + private void initFields() { + normalizerRan_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasNormalizerRan()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, normalizerRan_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, normalizerRan_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) obj; + + boolean result = true; + result = result && (hasNormalizerRan() == other.hasNormalizerRan()); + if (hasNormalizerRan()) { + result = result && (getNormalizerRan() + == other.getNormalizerRan()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasNormalizerRan()) { + hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER; + hash = (53 * hash) + 
hashBoolean(getNormalizerRan()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.NormalizeResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder { + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + normalizerRan_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_NormalizeResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.normalizerRan_ = normalizerRan_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()) return this; + if (other.hasNormalizerRan()) { + setNormalizerRan(other.getNormalizerRan()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasNormalizerRan()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool normalizer_ran = 1; + private boolean normalizerRan_ ; + /** + * required bool normalizer_ran = 1; + */ + public boolean hasNormalizerRan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool normalizer_ran = 1; + */ + public boolean getNormalizerRan() { + return normalizerRan_; + } + /** + * required bool normalizer_ran = 1; + */ + public Builder setNormalizerRan(boolean value) { + bitField0_ |= 0x00000001; + normalizerRan_ = value; + onChanged(); + return this; + } + /** + * required bool normalizer_ran = 1; + */ + public Builder clearNormalizerRan() { + bitField0_ = (bitField0_ & ~0x00000001); + normalizerRan_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.NormalizeResponse) + } + + static { + defaultInstance = new NormalizeResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.NormalizeResponse) + } + + public interface SetNormalizerRunningRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool on = 1; + /** + * required bool on = 1; + */ + boolean hasOn(); + /** + * required bool on = 1; + */ + boolean getOn(); + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + */ + public static final class SetNormalizerRunningRequest extends + com.google.protobuf.GeneratedMessage + implements SetNormalizerRunningRequestOrBuilder { + // Use SetNormalizerRunningRequest.newBuilder() to construct. 
+ private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetNormalizerRunningRequest defaultInstance; + public static SetNormalizerRunningRequest getDefaultInstance() { + return defaultInstance; + } + + public SetNormalizerRunningRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetNormalizerRunningRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + on_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetNormalizerRunningRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool on = 1; + public static final int ON_FIELD_NUMBER = 1; + private boolean on_; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; + } + + private void initFields() { + on_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != 
-1) return isInitialized == 1; + + if (!hasOn()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, on_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, on_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) obj; + + boolean result = true; + result = result && (hasOn() == other.hasOn()); + if (hasOn()) { + result = result && (getOn() + == other.getOn()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOn()) { + hash = (37 * hash) + ON_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getOn()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + on_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.on_ = on_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance()) return this; + if (other.hasOn()) { + setOn(other.getOn()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasOn()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool on = 1; + private boolean on_ ; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; + } + /** + * required bool on = 1; + */ + public Builder setOn(boolean value) { + bitField0_ |= 0x00000001; + on_ = value; + onChanged(); + return this; + } + /** + * required bool on = 1; + */ + public Builder clearOn() { + bitField0_ = (bitField0_ & ~0x00000001); + on_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningRequest) + } + + static { + defaultInstance = new SetNormalizerRunningRequest(true); + defaultInstance.initFields(); + } + 
+ // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningRequest) + } + + public interface SetNormalizerRunningResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool prev_normalizer_value = 1; + /** + * optional bool prev_normalizer_value = 1; + */ + boolean hasPrevNormalizerValue(); + /** + * optional bool prev_normalizer_value = 1; + */ + boolean getPrevNormalizerValue(); + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + */ + public static final class SetNormalizerRunningResponse extends + com.google.protobuf.GeneratedMessage + implements SetNormalizerRunningResponseOrBuilder { + // Use SetNormalizerRunningResponse.newBuilder() to construct. + private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetNormalizerRunningResponse defaultInstance; + public static SetNormalizerRunningResponse getDefaultInstance() { + return defaultInstance; + } + + public SetNormalizerRunningResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetNormalizerRunningResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevNormalizerValue_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetNormalizerRunningResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new SetNormalizerRunningResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional bool prev_normalizer_value = 1; + public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1; + private boolean prevNormalizerValue_; + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean hasPrevNormalizerValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean getPrevNormalizerValue() { + return prevNormalizerValue_; + } + + private void initFields() { + prevNormalizerValue_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, prevNormalizerValue_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, prevNormalizerValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) obj; + + boolean result = true; + result = result && (hasPrevNormalizerValue() == other.hasPrevNormalizerValue()); + if (hasPrevNormalizerValue()) { + result = result && (getPrevNormalizerValue() + == other.getPrevNormalizerValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrevNormalizerValue()) { + hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getPrevNormalizerValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SetNormalizerRunningResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + prevNormalizerValue_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prevNormalizerValue_ = prevNormalizerValue_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()) return this; + if (other.hasPrevNormalizerValue()) { + setPrevNormalizerValue(other.getPrevNormalizerValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional bool prev_normalizer_value = 1; + private boolean prevNormalizerValue_ ; + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean hasPrevNormalizerValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_normalizer_value = 1; + */ + public boolean getPrevNormalizerValue() { + return prevNormalizerValue_; + } + /** + * optional bool prev_normalizer_value = 1; + */ + public Builder setPrevNormalizerValue(boolean value) { + bitField0_ |= 0x00000001; + prevNormalizerValue_ = value; + onChanged(); + return this; + } + /** + * optional bool prev_normalizer_value = 1; + */ + public Builder clearPrevNormalizerValue() { + bitField0_ = (bitField0_ & ~0x00000001); + prevNormalizerValue_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetNormalizerRunningResponse) + } + + static { + defaultInstance = new SetNormalizerRunningResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SetNormalizerRunningResponse) + } + + public interface IsNormalizerEnabledRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest} + */ + public static final class IsNormalizerEnabledRequest extends + com.google.protobuf.GeneratedMessage + implements IsNormalizerEnabledRequestOrBuilder { + // Use IsNormalizerEnabledRequest.newBuilder() to construct. 
+    private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final IsNormalizerEnabledRequest defaultInstance;
+    public static IsNormalizerEnabledRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public IsNormalizerEnabledRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private IsNormalizerEnabledRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<IsNormalizerEnabledRequest> PARSER =
+        new com.google.protobuf.AbstractParser<IsNormalizerEnabledRequest>() {
+      public IsNormalizerEnabledRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new IsNormalizerEnabledRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<IsNormalizerEnabledRequest> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.IsNormalizerEnabledRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.IsNormalizerEnabledRequest)
+    }
+
+    static {
+      defaultInstance = new IsNormalizerEnabledRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.IsNormalizerEnabledRequest)
+  }
+
+  public interface IsNormalizerEnabledResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required bool enabled = 1;
+    /**
+     * <code>required bool enabled = 1;</code>
+     */
+    boolean hasEnabled();
+    /**
+     * <code>required bool enabled = 1;</code>
+     */
+    boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.IsNormalizerEnabledResponse}
+   */
+  public static final class IsNormalizerEnabledResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements IsNormalizerEnabledResponseOrBuilder {
+    // Use IsNormalizerEnabledResponse.newBuilder() to construct.
+    private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final IsNormalizerEnabledResponse defaultInstance;
+    public static IsNormalizerEnabledResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public IsNormalizerEnabledResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
     private IsNormalizerEnabledResponse(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -57338,6 +59526,33 @@ public final class MasterProtos {
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
 
      /**
+       * <code>rpc SetSwitches(.hbase.pb.SetSwitchesRequest) returns (.hbase.pb.SetSwitchesResponse);</code>
+       *
+       * <pre>
+       **
+       * Turn the switches on or off.
+       * If synchronous is true, it waits until current operation call, if outstanding, to return.
+       * </pre>
+       */
+      public abstract void setSwitches(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse> done);
+
+      /**
+       * <code>rpc IsSwitchEnabled(.hbase.pb.IsSwitchEnabledRequest) returns (.hbase.pb.IsSwitchEnabledResponse);</code>
+       *
+       * <pre>
+       **
+       * Query whether the switch is on/off.
+       * </pre>
+       */
+      public abstract void isSwitchEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse> done);
+
+      /**
        * <code>rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse);</code>
        *
        * <pre>
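
The two service methods above are the wire-level backing for the new Admin#setSwitches and
Admin#isSwitchEnabled calls this patch introduces. A minimal client-side sketch of the intended
HBCK workflow (assuming an already-open Connection `conn`; SwitchType is the enum added to Admin
in this patch, and IOException propagates to the caller):

    // Sketch only: disable region splits and merges around a repair run,
    // then restore the previous values. setSwitches returns the prior
    // state of each switch, in argument order.
    try (Admin admin = conn.getAdmin()) {
      boolean[] prev = admin.setSwitches(false, true,
          Admin.SwitchType.SPLIT, Admin.SwitchType.MERGE);
      try {
        // ... run HBCK/repair work while splits and merges are off ...
      } finally {
        admin.setSwitches(prev[0], true, Admin.SwitchType.SPLIT);
        admin.setSwitches(prev[1], true, Admin.SwitchType.MERGE);
      }
    }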
@@ -57919,6 +60134,22 @@ public final class MasterProtos {
         }
 
         @java.lang.Override
+        public  void setSwitches(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse> done) {
+          impl.setSwitches(controller, request, done);
+        }
+
+        @java.lang.Override
+        public  void isSwitchEnabled(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse> done) {
+          impl.isSwitchEnabled(controller, request, done);
+        }
+
+        @java.lang.Override
         public  void normalize(
             com.google.protobuf.RpcController controller,
             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
@@ -58237,66 +60468,70 @@ public final class MasterProtos {
             case 23:
               return impl.isBalancerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)request);
             case 24:
-              return impl.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request);
+              return impl.setSwitches(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest)request);
             case 25:
-              return impl.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request);
+              return impl.isSwitchEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest)request);
             case 26:
-              return impl.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request);
+              return impl.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request);
             case 27:
-              return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request);
+              return impl.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request);
             case 28:
-              return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request);
+              return impl.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request);
             case 29:
-              return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request);
+              return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request);
             case 30:
-              return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
+              return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request);
             case 31:
-              return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request);
+              return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request);
             case 32:
-              return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request);
+              return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
             case 33:
-              return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request);
+              return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request);
             case 34:
-              return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request);
+              return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request);
             case 35:
-              return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request);
+              return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request);
             case 36:
-              return impl.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request);
+              return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request);
             case 37:
-              return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
+              return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request);
             case 38:
-              return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
+              return impl.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request);
             case 39:
-              return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request);
+              return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
             case 40:
-              return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request);
+              return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
             case 41:
-              return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request);
+              return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request);
             case 42:
-              return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request);
+              return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request);
             case 43:
-              return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request);
+              return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request);
             case 44:
-              return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request);
+              return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request);
             case 45:
-              return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request);
+              return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request);
             case 46:
-              return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request);
+              return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request);
             case 47:
-              return impl.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request);
+              return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request);
             case 48:
-              return impl.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request);
+              return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request);
             case 49:
-              return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request);
+              return impl.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request);
             case 50:
-              return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
+              return impl.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request);
             case 51:
-              return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
+              return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request);
             case 52:
-              return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
+              return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
             case 53:
-              return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+              return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
             case 54:
+              return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
+            case 55:
+              return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+            case 56:
               return impl.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request);
             default:
               throw new java.lang.AssertionError("Can't get here.");
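
The case renumbering in this and the following switches is mechanical: a generated service
dispatches on the method's index in the Master.proto declaration order, so inserting SetSwitches
and IsSwitchEnabled directly after IsBalancerEnabled (index 23) shifts every later method up by
two. A toy illustration of the dispatch pattern (hypothetical names, not the generated code
itself):

    // Sketch only: index-based dispatch in the style protoc generates.
    // The handler array is ordered exactly as the RPCs appear in the .proto
    // file, which is why a mid-service insertion renumbers all later cases.
    interface Handler {
      com.google.protobuf.Message call(com.google.protobuf.Message request);
    }

    final class IndexDispatcher {
      private final Handler[] handlers;  // one entry per RPC, in .proto order
      IndexDispatcher(Handler... handlers) { this.handlers = handlers; }

      com.google.protobuf.Message dispatch(int methodIndex, com.google.protobuf.Message request) {
        if (methodIndex < 0 || methodIndex >= handlers.length) {
          throw new AssertionError("Can't get here.");
        }
        return handlers[methodIndex].call(request);
      }
    }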
@@ -58361,66 +60596,70 @@ public final class MasterProtos {
             case 23:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
             case 24:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.getDefaultInstance();
             case 25:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.getDefaultInstance();
             case 26:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
             case 27:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
             case 28:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
             case 29:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
             case 30:
-              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
             case 31:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
             case 32:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
             case 33:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
             case 34:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
             case 35:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
             case 36:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
             case 37:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
             case 38:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
             case 39:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
             case 40:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
             case 41:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
             case 42:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
             case 43:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
             case 44:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
             case 45:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
             case 46:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
             case 47:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
             case 48:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
             case 49:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
             case 50:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
             case 51:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
             case 52:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
             case 53:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
             case 54:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+            case 55:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+            case 56:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
             default:
               throw new java.lang.AssertionError("Can't get here.");
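
These prototype tables exist for the RPC transport: given a method descriptor, the server asks
the service for an empty request instance and parses the incoming bytes against it. Roughly,
using the stock com.google.protobuf.Service API (`service`, `method`, and `bytes` are assumed to
be in scope):

    // Sketch only: how a transport typically consumes getRequestPrototype().
    com.google.protobuf.Message prototype = service.getRequestPrototype(method);
    com.google.protobuf.Message request =
        prototype.newBuilderForType().mergeFrom(bytes).build();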
@@ -58485,66 +60724,70 @@ public final class MasterProtos {
             case 23:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
             case 24:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance();
             case 25:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance();
             case 26:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
             case 27:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
             case 28:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
             case 29:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
             case 30:
-              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
             case 31:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
             case 32:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
             case 33:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
             case 34:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
             case 35:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
             case 36:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
             case 37:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
             case 38:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
             case 39:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
             case 40:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
             case 41:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
             case 42:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
             case 43:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
             case 44:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
             case 45:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
             case 46:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
             case 47:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
             case 48:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
             case 49:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
             case 50:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
             case 51:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
             case 52:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
             case 53:
-              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
             case 54:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+            case 55:
+              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+            case 56:
               return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
             default:
               throw new java.lang.AssertionError("Can't get here.");
@@ -58857,6 +61100,33 @@ public final class MasterProtos {
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
 
     /**
+     * <code>rpc SetSwitches(.hbase.pb.SetSwitchesRequest) returns (.hbase.pb.SetSwitchesResponse);</code>
+     *
+     * <pre>
+     **
+     * Turn the switches on or off.
+     * If synchronous is true, it waits until current operation call, if outstanding, to return.
+     * </pre>
+     */
+    public abstract void setSwitches(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse> done);
+
+    /**
+     * <code>rpc IsSwitchEnabled(.hbase.pb.IsSwitchEnabledRequest) returns (.hbase.pb.IsSwitchEnabledResponse);</code>
+     *
+     * <pre>
+     **
+     * Query whether the switch is on/off.
+     * </pre>
+     */
+    public abstract void isSwitchEnabled(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse> done);
+
+    /**
     * <code>rpc Normalize(.hbase.pb.NormalizeRequest) returns (.hbase.pb.NormalizeResponse);</code>
     *
     * <pre>
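
A server-side implementor supplies these two callbacks; sketched below with an assumed `enabled`
field on the response (mirroring IsNormalizerEnabledResponse earlier in this file -- the actual
Switch.proto message fields are not visible in this hunk), with the real value presumably coming
from the SwitchTrackerManager this patch adds:

    // Sketch only: answering IsSwitchEnabled through the async service interface.
    public void isSwitchEnabled(com.google.protobuf.RpcController controller,
        MasterProtos.IsSwitchEnabledRequest request,
        com.google.protobuf.RpcCallback<MasterProtos.IsSwitchEnabledResponse> done) {
      MasterProtos.IsSwitchEnabledResponse.Builder response =
          MasterProtos.IsSwitchEnabledResponse.newBuilder();
      response.setEnabled(true);  // assumed generated setter for an `enabled` field
      done.run(response.build());
    }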
@@ -59383,156 +61653,166 @@ public final class MasterProtos {
               done));
           return;
         case 24:
+          this.setSwitches(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse>specializeCallback(
+              done));
+          return;
+        case 25:
+          this.isSwitchEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse>specializeCallback(
+              done));
+          return;
+        case 26:
           this.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse>specializeCallback(
               done));
           return;
-        case 25:
+        case 27:
           this.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse>specializeCallback(
               done));
           return;
-        case 26:
+        case 28:
           this.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse>specializeCallback(
               done));
           return;
-        case 27:
+        case 29:
           this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse>specializeCallback(
               done));
           return;
-        case 28:
+        case 30:
           this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse>specializeCallback(
               done));
           return;
-        case 29:
+        case 31:
           this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse>specializeCallback(
               done));
           return;
-        case 30:
+        case 32:
           this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
               done));
           return;
-        case 31:
+        case 33:
           this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse>specializeCallback(
               done));
           return;
-        case 32:
+        case 34:
           this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse>specializeCallback(
               done));
           return;
-        case 33:
+        case 35:
           this.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse>specializeCallback(
               done));
           return;
-        case 34:
+        case 36:
           this.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse>specializeCallback(
               done));
           return;
-        case 35:
+        case 37:
           this.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse>specializeCallback(
               done));
           return;
-        case 36:
+        case 38:
           this.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse>specializeCallback(
               done));
           return;
-        case 37:
+        case 39:
           this.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
               done));
           return;
-        case 38:
+        case 40:
           this.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
               done));
           return;
-        case 39:
+        case 41:
           this.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse>specializeCallback(
               done));
           return;
-        case 40:
+        case 42:
           this.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse>specializeCallback(
               done));
           return;
-        case 41:
+        case 43:
           this.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse>specializeCallback(
               done));
           return;
-        case 42:
+        case 44:
           this.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse>specializeCallback(
               done));
           return;
-        case 43:
+        case 45:
           this.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse>specializeCallback(
               done));
           return;
-        case 44:
+        case 46:
           this.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse>specializeCallback(
               done));
           return;
-        case 45:
+        case 47:
           this.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse>specializeCallback(
               done));
           return;
-        case 46:
+        case 48:
           this.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse>specializeCallback(
               done));
           return;
-        case 47:
+        case 49:
           this.getTableState(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse>specializeCallback(
               done));
           return;
-        case 48:
+        case 50:
           this.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse>specializeCallback(
               done));
           return;
-        case 49:
+        case 51:
           this.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
               done));
           return;
-        case 50:
+        case 52:
           this.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
               done));
           return;
-        case 51:
+        case 53:
           this.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse>specializeCallback(
               done));
           return;
-        case 52:
+        case 54:
           this.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse>specializeCallback(
               done));
           return;
-        case 53:
+        case 55:
           this.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse>specializeCallback(
               done));
           return;
-        case 54:
+        case 56:
           this.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse>specializeCallback(
               done));
@@ -59600,66 +61880,70 @@ public final class MasterProtos {
         case 23:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
         case 24:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest.getDefaultInstance();
         case 25:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest.getDefaultInstance();
         case 26:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
         case 27:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
         case 28:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
         case 29:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
         case 30:
-          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
         case 31:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
         case 32:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
         case 33:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
         case 34:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
         case 35:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
         case 36:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
         case 37:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
         case 38:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
         case 39:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
         case 40:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
         case 41:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
         case 42:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
         case 43:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
         case 44:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
         case 45:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
         case 46:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
         case 47:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
         case 48:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
         case 49:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance();
         case 50:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
         case 51:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
         case 52:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
         case 53:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
         case 54:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+        case 55:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+        case 56:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
@@ -59724,66 +62008,70 @@ public final class MasterProtos {
         case 23:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
         case 24:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance();
         case 25:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance();
         case 26:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
         case 27:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
         case 28:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
         case 29:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
         case 30:
-          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
         case 31:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
         case 32:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
         case 33:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
         case 34:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
         case 35:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
         case 36:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
         case 37:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
         case 38:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
         case 39:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
         case 40:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
         case 41:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
         case 42:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
         case 43:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
         case 44:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
         case 45:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
         case 46:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
         case 47:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
         case 48:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
         case 49:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance();
         case 50:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
         case 51:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
         case 52:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
         case 53:
-          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
         case 54:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+        case 55:
+          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+        case 56:
           return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
@@ -60166,12 +62454,42 @@ public final class MasterProtos {
             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance()));
       }
 
+      public  void setSwitches(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(24),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance()));
+      }
+
+      public  void isSwitchEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(25),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance()));
+      }
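+      // setSwitches and isSwitchEnabled are new in this patch and occupy
+      // descriptor indices 24 and 25, so every callMethod() index below
+      // shifts up by two (normalize: 24 -> 26, and so on).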
+
       public  void normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(24),
+          getDescriptor().getMethods().get(26),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(),
@@ -60186,7 +62504,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(25),
+          getDescriptor().getMethods().get(27),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(),
@@ -60201,7 +62519,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(26),
+          getDescriptor().getMethods().get(28),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance(),
@@ -60216,7 +62534,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(27),
+          getDescriptor().getMethods().get(29),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(),
@@ -60231,7 +62549,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(28),
+          getDescriptor().getMethods().get(30),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(),
@@ -60246,7 +62564,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(29),
+          getDescriptor().getMethods().get(31),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(),
@@ -60261,7 +62579,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(30),
+          getDescriptor().getMethods().get(32),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
@@ -60276,7 +62594,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(31),
+          getDescriptor().getMethods().get(33),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(),
@@ -60291,7 +62609,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(32),
+          getDescriptor().getMethods().get(34),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(),
@@ -60306,7 +62624,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(33),
+          getDescriptor().getMethods().get(35),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(),
@@ -60321,7 +62639,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(34),
+          getDescriptor().getMethods().get(36),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(),
@@ -60336,7 +62654,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(35),
+          getDescriptor().getMethods().get(37),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(),
@@ -60351,7 +62669,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(36),
+          getDescriptor().getMethods().get(38),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(),
@@ -60366,7 +62684,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(37),
+          getDescriptor().getMethods().get(39),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
@@ -60381,7 +62699,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(38),
+          getDescriptor().getMethods().get(40),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
@@ -60396,7 +62714,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(39),
+          getDescriptor().getMethods().get(41),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(),
@@ -60411,7 +62729,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(40),
+          getDescriptor().getMethods().get(42),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(),
@@ -60426,7 +62744,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(41),
+          getDescriptor().getMethods().get(43),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(),
@@ -60441,7 +62759,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(42),
+          getDescriptor().getMethods().get(44),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(),
@@ -60456,7 +62774,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(43),
+          getDescriptor().getMethods().get(45),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(),
@@ -60471,7 +62789,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(44),
+          getDescriptor().getMethods().get(46),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(),
@@ -60486,7 +62804,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(45),
+          getDescriptor().getMethods().get(47),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(),
@@ -60501,7 +62819,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(46),
+          getDescriptor().getMethods().get(48),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(),
@@ -60516,7 +62834,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(47),
+          getDescriptor().getMethods().get(49),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(),
@@ -60531,7 +62849,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(48),
+          getDescriptor().getMethods().get(50),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(),
@@ -60546,7 +62864,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(49),
+          getDescriptor().getMethods().get(51),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
@@ -60561,7 +62879,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(50),
+          getDescriptor().getMethods().get(52),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
@@ -60576,7 +62894,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(51),
+          getDescriptor().getMethods().get(53),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(),
@@ -60591,7 +62909,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(52),
+          getDescriptor().getMethods().get(54),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(),
@@ -60606,7 +62924,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(53),
+          getDescriptor().getMethods().get(55),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(),
@@ -60621,7 +62939,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
         channel.callMethod(
-          getDescriptor().getMethods().get(54),
+          getDescriptor().getMethods().get(56),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(),
@@ -60758,6 +63076,16 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request)
           throws com.google.protobuf.ServiceException;
 
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse setSwitches(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse isSwitchEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request)
+          throws com.google.protobuf.ServiceException;
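+      // Blocking counterparts of the new switch RPCs; the client reaches
+      // them through the connection's MasterService stub.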
+
       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
@@ -61209,12 +63537,36 @@ public final class MasterProtos {
       }
 
 
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse setSwitches(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(24),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSwitchesResponse.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse isSwitchEnabled(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(25),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSwitchEnabledResponse.getDefaultInstance());
+      }
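+      // Usage sketch (assuming IsSwitchEnabledResponse exposes a single
+      // bool via getEnabled(), matching Admin#isSwitchEnabled's boolean
+      // return; the request is built by the client-side RequestConverter):
+      //   boolean splitEnabled =
+      //       stub.isSwitchEnabled(controller, request).getEnabled();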
+
+
       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
           com.google.protobuf.RpcController controller,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(24),
+          getDescriptor().getMethods().get(26),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance());
@@ -61226,7 +63578,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(25),
+          getDescriptor().getMethods().get(27),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance());
@@ -61238,7 +63590,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(26),
+          getDescriptor().getMethods().get(28),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance());
@@ -61250,7 +63602,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(27),
+          getDescriptor().getMethods().get(29),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance());
@@ -61262,7 +63614,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(28),
+          getDescriptor().getMethods().get(30),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance());
@@ -61274,7 +63626,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(29),
+          getDescriptor().getMethods().get(31),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance());
@@ -61286,7 +63638,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(30),
+          getDescriptor().getMethods().get(32),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
@@ -61298,7 +63650,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(31),
+          getDescriptor().getMethods().get(33),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance());
@@ -61310,7 +63662,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(32),
+          getDescriptor().getMethods().get(34),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance());
@@ -61322,7 +63674,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(33),
+          getDescriptor().getMethods().get(35),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance());
@@ -61334,7 +63686,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(34),
+          getDescriptor().getMethods().get(36),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance());
@@ -61346,7 +63698,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(35),
+          getDescriptor().getMethods().get(37),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance());
@@ -61358,7 +63710,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(36),
+          getDescriptor().getMethods().get(38),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance());
@@ -61370,7 +63722,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(37),
+          getDescriptor().getMethods().get(39),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
@@ -61382,7 +63734,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(38),
+          getDescriptor().getMethods().get(40),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
@@ -61394,7 +63746,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(39),
+          getDescriptor().getMethods().get(41),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance());
@@ -61406,7 +63758,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(40),
+          getDescriptor().getMethods().get(42),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance());
@@ -61418,7 +63770,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(41),
+          getDescriptor().getMethods().get(43),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance());
@@ -61430,7 +63782,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(42),
+          getDescriptor().getMethods().get(44),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance());
@@ -61442,7 +63794,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(43),
+          getDescriptor().getMethods().get(45),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance());
@@ -61454,7 +63806,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(44),
+          getDescriptor().getMethods().get(46),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance());
@@ -61466,7 +63818,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(45),
+          getDescriptor().getMethods().get(47),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance());
@@ -61478,7 +63830,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(46),
+          getDescriptor().getMethods().get(48),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance());
@@ -61490,7 +63842,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(47),
+          getDescriptor().getMethods().get(49),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance());
@@ -61502,7 +63854,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(48),
+          getDescriptor().getMethods().get(50),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance());
@@ -61514,7 +63866,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(49),
+          getDescriptor().getMethods().get(51),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
@@ -61526,7 +63878,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(50),
+          getDescriptor().getMethods().get(52),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
@@ -61538,7 +63890,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(51),
+          getDescriptor().getMethods().get(53),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance());
@@ -61550,7 +63902,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(52),
+          getDescriptor().getMethods().get(54),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance());
@@ -61562,7 +63914,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(53),
+          getDescriptor().getMethods().get(55),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance());
@@ -61574,7 +63926,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(54),
+          getDescriptor().getMethods().get(56),
           controller,
           request,
           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance());
@@ -61846,6 +64198,26 @@ public final class MasterProtos {
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_hbase_pb_IsBalancerEnabledResponse_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SetSwitchesRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SetSwitchesRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SetSwitchesResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SetSwitchesResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_IsSwitchEnabledRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_IsSwitchEnabledResponse_fieldAccessorTable;
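+  // Descriptor and accessor-table slots for the four new switch messages
+  // (SetSwitchesRequest/Response, IsSwitchEnabledRequest/Response).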
+  private static com.google.protobuf.Descriptors.Descriptor
     internal_static_hbase_pb_NormalizeRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -62211,225 +64583,237 @@ public final class MasterProtos {
       "ncerRunningResponse\022\032\n\022prev_balance_valu",
       "e\030\001 \001(\010\"\032\n\030IsBalancerEnabledRequest\",\n\031I" +
       "sBalancerEnabledResponse\022\017\n\007enabled\030\001 \002(" +
-      "\010\"\022\n\020NormalizeRequest\"+\n\021NormalizeRespon" +
-      "se\022\026\n\016normalizer_ran\030\001 \002(\010\")\n\033SetNormali" +
-      "zerRunningRequest\022\n\n\002on\030\001 \002(\010\"=\n\034SetNorm" +
-      "alizerRunningResponse\022\035\n\025prev_normalizer" +
-      "_value\030\001 \001(\010\"\034\n\032IsNormalizerEnabledReque" +
-      "st\".\n\033IsNormalizerEnabledResponse\022\017\n\007ena" +
-      "bled\030\001 \002(\010\"\027\n\025RunCatalogScanRequest\"-\n\026R" +
-      "unCatalogScanResponse\022\023\n\013scan_result\030\001 \001",
-      "(\005\"-\n\033EnableCatalogJanitorRequest\022\016\n\006ena" +
-      "ble\030\001 \002(\010\"2\n\034EnableCatalogJanitorRespons" +
-      "e\022\022\n\nprev_value\030\001 \001(\010\" \n\036IsCatalogJanito" +
-      "rEnabledRequest\"0\n\037IsCatalogJanitorEnabl" +
-      "edResponse\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotReq" +
-      "uest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Snapsh" +
-      "otDescription\",\n\020SnapshotResponse\022\030\n\020exp" +
-      "ected_timeout\030\001 \002(\003\"\036\n\034GetCompletedSnaps" +
-      "hotsRequest\"Q\n\035GetCompletedSnapshotsResp" +
-      "onse\0220\n\tsnapshots\030\001 \003(\0132\035.hbase.pb.Snaps",
-      "hotDescription\"H\n\025DeleteSnapshotRequest\022" +
-      "/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDes" +
-      "cription\"\030\n\026DeleteSnapshotResponse\"I\n\026Re" +
-      "storeSnapshotRequest\022/\n\010snapshot\030\001 \002(\0132\035" +
-      ".hbase.pb.SnapshotDescription\"\031\n\027Restore" +
-      "SnapshotResponse\"H\n\025IsSnapshotDoneReques" +
-      "t\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.SnapshotD" +
-      "escription\"^\n\026IsSnapshotDoneResponse\022\023\n\004" +
-      "done\030\001 \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hb" +
-      "ase.pb.SnapshotDescription\"O\n\034IsRestoreS",
-      "napshotDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.h" +
-      "base.pb.SnapshotDescription\"4\n\035IsRestore" +
-      "SnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fals" +
-      "e\"F\n\033GetSchemaAlterStatusRequest\022\'\n\ntabl" +
-      "e_name\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034Get" +
-      "SchemaAlterStatusResponse\022\035\n\025yet_to_upda" +
-      "te_regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"" +
-      "\213\001\n\032GetTableDescriptorsRequest\022(\n\013table_" +
-      "names\030\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005rege" +
-      "x\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(\010:\005fal",
-      "se\022\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTableDescrip" +
-      "torsResponse\022+\n\014table_schema\030\001 \003(\0132\025.hba" +
-      "se.pb.TableSchema\"[\n\024GetTableNamesReques" +
-      "t\022\r\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002" +
-      " \001(\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTab" +
-      "leNamesResponse\022(\n\013table_names\030\001 \003(\0132\023.h" +
-      "base.pb.TableName\"?\n\024GetTableStateReques" +
-      "t\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableNa" +
-      "me\"B\n\025GetTableStateResponse\022)\n\013table_sta" +
-      "te\030\001 \002(\0132\024.hbase.pb.TableState\"\031\n\027GetClu",
-      "sterStatusRequest\"K\n\030GetClusterStatusRes" +
-      "ponse\022/\n\016cluster_status\030\001 \002(\0132\027.hbase.pb" +
-      ".ClusterStatus\"\030\n\026IsMasterRunningRequest" +
-      "\"4\n\027IsMasterRunningResponse\022\031\n\021is_master" +
-      "_running\030\001 \002(\010\"I\n\024ExecProcedureRequest\0221" +
-      "\n\tprocedure\030\001 \002(\0132\036.hbase.pb.ProcedureDe" +
-      "scription\"F\n\025ExecProcedureResponse\022\030\n\020ex" +
-      "pected_timeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(" +
-      "\014\"K\n\026IsProcedureDoneRequest\0221\n\tprocedure" +
-      "\030\001 \001(\0132\036.hbase.pb.ProcedureDescription\"`",
-      "\n\027IsProcedureDoneResponse\022\023\n\004done\030\001 \001(\010:" +
-      "\005false\0220\n\010snapshot\030\002 \001(\0132\036.hbase.pb.Proc" +
-      "edureDescription\",\n\031GetProcedureResultRe" +
-      "quest\022\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedureR" +
-      "esultResponse\0229\n\005state\030\001 \002(\0162*.hbase.pb." +
-      "GetProcedureResultResponse.State\022\022\n\nstar" +
-      "t_time\030\002 \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006res" +
-      "ult\030\004 \001(\014\0224\n\texception\030\005 \001(\0132!.hbase.pb." +
-      "ForeignExceptionMessage\"1\n\005State\022\r\n\tNOT_" +
-      "FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025Ab",
-      "ortProcedureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025" +
-      "mayInterruptIfRunning\030\002 \001(\010:\004true\"6\n\026Abo" +
-      "rtProcedureResponse\022\034\n\024is_procedure_abor" +
-      "ted\030\001 \002(\010\"\027\n\025ListProceduresRequest\"@\n\026Li" +
-      "stProceduresResponse\022&\n\tprocedure\030\001 \003(\0132" +
-      "\023.hbase.pb.Procedure\"\315\001\n\017SetQuotaRequest" +
-      "\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022" +
-      "\021\n\tnamespace\030\003 \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023" +
-      ".hbase.pb.TableName\022\022\n\nremove_all\030\005 \001(\010\022" +
-      "\026\n\016bypass_globals\030\006 \001(\010\022+\n\010throttle\030\007 \001(",
-      "\0132\031.hbase.pb.ThrottleRequest\"\022\n\020SetQuota" +
-      "Response\"J\n\037MajorCompactionTimestampRequ" +
-      "est\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Table" +
-      "Name\"U\n(MajorCompactionTimestampForRegio" +
-      "nRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Regi" +
-      "onSpecifier\"@\n MajorCompactionTimestampR" +
-      "esponse\022\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n" +
-      "\033SecurityCapabilitiesRequest\"\354\001\n\034Securit" +
-      "yCapabilitiesResponse\022G\n\014capabilities\030\001 " +
-      "\003(\01621.hbase.pb.SecurityCapabilitiesRespo",
-      "nse.Capability\"\202\001\n\nCapability\022\031\n\025SIMPLE_" +
-      "AUTHENTICATION\020\000\022\031\n\025SECURE_AUTHENTICATIO" +
-      "N\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022CELL_AUTHORIZA" +
-      "TION\020\003\022\023\n\017CELL_VISIBILITY\020\0042\374&\n\rMasterSe" +
-      "rvice\022e\n\024GetSchemaAlterStatus\022%.hbase.pb" +
-      ".GetSchemaAlterStatusRequest\032&.hbase.pb." +
-      "GetSchemaAlterStatusResponse\022b\n\023GetTable" +
-      "Descriptors\022$.hbase.pb.GetTableDescripto" +
-      "rsRequest\032%.hbase.pb.GetTableDescriptors" +
-      "Response\022P\n\rGetTableNames\022\036.hbase.pb.Get",
-      "TableNamesRequest\032\037.hbase.pb.GetTableNam" +
-      "esResponse\022Y\n\020GetClusterStatus\022!.hbase.p" +
-      "b.GetClusterStatusRequest\032\".hbase.pb.Get" +
-      "ClusterStatusResponse\022V\n\017IsMasterRunning" +
-      "\022 .hbase.pb.IsMasterRunningRequest\032!.hba" +
-      "se.pb.IsMasterRunningResponse\022D\n\tAddColu" +
-      "mn\022\032.hbase.pb.AddColumnRequest\032\033.hbase.p" +
-      "b.AddColumnResponse\022M\n\014DeleteColumn\022\035.hb" +
-      "ase.pb.DeleteColumnRequest\032\036.hbase.pb.De" +
-      "leteColumnResponse\022M\n\014ModifyColumn\022\035.hba",
-      "se.pb.ModifyColumnRequest\032\036.hbase.pb.Mod" +
-      "ifyColumnResponse\022G\n\nMoveRegion\022\033.hbase." +
-      "pb.MoveRegionRequest\032\034.hbase.pb.MoveRegi" +
-      "onResponse\022k\n\026DispatchMergingRegions\022\'.h" +
-      "base.pb.DispatchMergingRegionsRequest\032(." +
-      "hbase.pb.DispatchMergingRegionsResponse\022" +
-      "M\n\014AssignRegion\022\035.hbase.pb.AssignRegionR" +
-      "equest\032\036.hbase.pb.AssignRegionResponse\022S" +
-      "\n\016UnassignRegion\022\037.hbase.pb.UnassignRegi" +
-      "onRequest\032 .hbase.pb.UnassignRegionRespo",
-      "nse\022P\n\rOfflineRegion\022\036.hbase.pb.OfflineR" +
-      "egionRequest\032\037.hbase.pb.OfflineRegionRes" +
-      "ponse\022J\n\013DeleteTable\022\034.hbase.pb.DeleteTa" +
-      "bleRequest\032\035.hbase.pb.DeleteTableRespons" +
-      "e\022P\n\rtruncateTable\022\036.hbase.pb.TruncateTa" +
-      "bleRequest\032\037.hbase.pb.TruncateTableRespo" +
-      "nse\022J\n\013EnableTable\022\034.hbase.pb.EnableTabl" +
-      "eRequest\032\035.hbase.pb.EnableTableResponse\022" +
-      "M\n\014DisableTable\022\035.hbase.pb.DisableTableR" +
-      "equest\032\036.hbase.pb.DisableTableResponse\022J",
-      "\n\013ModifyTable\022\034.hbase.pb.ModifyTableRequ" +
-      "est\032\035.hbase.pb.ModifyTableResponse\022J\n\013Cr" +
-      "eateTable\022\034.hbase.pb.CreateTableRequest\032" +
-      "\035.hbase.pb.CreateTableResponse\022A\n\010Shutdo" +
-      "wn\022\031.hbase.pb.ShutdownRequest\032\032.hbase.pb" +
-      ".ShutdownResponse\022G\n\nStopMaster\022\033.hbase." +
-      "pb.StopMasterRequest\032\034.hbase.pb.StopMast" +
-      "erResponse\022>\n\007Balance\022\030.hbase.pb.Balance" +
-      "Request\032\031.hbase.pb.BalanceResponse\022_\n\022Se" +
-      "tBalancerRunning\022#.hbase.pb.SetBalancerR",
-      "unningRequest\032$.hbase.pb.SetBalancerRunn" +
-      "ingResponse\022\\\n\021IsBalancerEnabled\022\".hbase" +
-      ".pb.IsBalancerEnabledRequest\032#.hbase.pb." +
-      "IsBalancerEnabledResponse\022D\n\tNormalize\022\032" +
-      ".hbase.pb.NormalizeRequest\032\033.hbase.pb.No" +
-      "rmalizeResponse\022e\n\024SetNormalizerRunning\022" +
-      "%.hbase.pb.SetNormalizerRunningRequest\032&" +
-      ".hbase.pb.SetNormalizerRunningResponse\022b" +
-      "\n\023IsNormalizerEnabled\022$.hbase.pb.IsNorma" +
-      "lizerEnabledRequest\032%.hbase.pb.IsNormali",
-      "zerEnabledResponse\022S\n\016RunCatalogScan\022\037.h" +
-      "base.pb.RunCatalogScanRequest\032 .hbase.pb" +
-      ".RunCatalogScanResponse\022e\n\024EnableCatalog" +
-      "Janitor\022%.hbase.pb.EnableCatalogJanitorR" +
-      "equest\032&.hbase.pb.EnableCatalogJanitorRe" +
-      "sponse\022n\n\027IsCatalogJanitorEnabled\022(.hbas" +
-      "e.pb.IsCatalogJanitorEnabledRequest\032).hb" +
-      "ase.pb.IsCatalogJanitorEnabledResponse\022^" +
-      "\n\021ExecMasterService\022#.hbase.pb.Coprocess" +
-      "orServiceRequest\032$.hbase.pb.CoprocessorS",
-      "erviceResponse\022A\n\010Snapshot\022\031.hbase.pb.Sn" +
-      "apshotRequest\032\032.hbase.pb.SnapshotRespons" +
-      "e\022h\n\025GetCompletedSnapshots\022&.hbase.pb.Ge" +
-      "tCompletedSnapshotsRequest\032\'.hbase.pb.Ge" +
-      "tCompletedSnapshotsResponse\022S\n\016DeleteSna" +
-      "pshot\022\037.hbase.pb.DeleteSnapshotRequest\032 " +
-      ".hbase.pb.DeleteSnapshotResponse\022S\n\016IsSn" +
-      "apshotDone\022\037.hbase.pb.IsSnapshotDoneRequ" +
-      "est\032 .hbase.pb.IsSnapshotDoneResponse\022V\n" +
-      "\017RestoreSnapshot\022 .hbase.pb.RestoreSnaps",
-      "hotRequest\032!.hbase.pb.RestoreSnapshotRes" +
-      "ponse\022h\n\025IsRestoreSnapshotDone\022&.hbase.p" +
-      "b.IsRestoreSnapshotDoneRequest\032\'.hbase.p" +
-      "b.IsRestoreSnapshotDoneResponse\022P\n\rExecP" +
-      "rocedure\022\036.hbase.pb.ExecProcedureRequest" +
-      "\032\037.hbase.pb.ExecProcedureResponse\022W\n\024Exe" +
-      "cProcedureWithRet\022\036.hbase.pb.ExecProcedu" +
-      "reRequest\032\037.hbase.pb.ExecProcedureRespon" +
-      "se\022V\n\017IsProcedureDone\022 .hbase.pb.IsProce" +
-      "dureDoneRequest\032!.hbase.pb.IsProcedureDo",
-      "neResponse\022V\n\017ModifyNamespace\022 .hbase.pb" +
-      ".ModifyNamespaceRequest\032!.hbase.pb.Modif" +
-      "yNamespaceResponse\022V\n\017CreateNamespace\022 ." +
-      "hbase.pb.CreateNamespaceRequest\032!.hbase." +
-      "pb.CreateNamespaceResponse\022V\n\017DeleteName" +
-      "space\022 .hbase.pb.DeleteNamespaceRequest\032" +
-      "!.hbase.pb.DeleteNamespaceResponse\022k\n\026Ge" +
-      "tNamespaceDescriptor\022\'.hbase.pb.GetNames" +
-      "paceDescriptorRequest\032(.hbase.pb.GetName" +
-      "spaceDescriptorResponse\022q\n\030ListNamespace",
-      "Descriptors\022).hbase.pb.ListNamespaceDesc" +
-      "riptorsRequest\032*.hbase.pb.ListNamespaceD" +
-      "escriptorsResponse\022\206\001\n\037ListTableDescript" +
-      "orsByNamespace\0220.hbase.pb.ListTableDescr" +
-      "iptorsByNamespaceRequest\0321.hbase.pb.List" +
-      "TableDescriptorsByNamespaceResponse\022t\n\031L" +
-      "istTableNamesByNamespace\022*.hbase.pb.List" +
-      "TableNamesByNamespaceRequest\032+.hbase.pb." +
-      "ListTableNamesByNamespaceResponse\022P\n\rGet" +
-      "TableState\022\036.hbase.pb.GetTableStateReque",
-      "st\032\037.hbase.pb.GetTableStateResponse\022A\n\010S" +
-      "etQuota\022\031.hbase.pb.SetQuotaRequest\032\032.hba" +
-      "se.pb.SetQuotaResponse\022x\n\037getLastMajorCo" +
-      "mpactionTimestamp\022).hbase.pb.MajorCompac" +
-      "tionTimestampRequest\032*.hbase.pb.MajorCom" +
-      "pactionTimestampResponse\022\212\001\n(getLastMajo" +
-      "rCompactionTimestampForRegion\0222.hbase.pb" +
-      ".MajorCompactionTimestampForRegionReques" +
-      "t\032*.hbase.pb.MajorCompactionTimestampRes" +
-      "ponse\022_\n\022getProcedureResult\022#.hbase.pb.G",
-      "etProcedureResultRequest\032$.hbase.pb.GetP" +
-      "rocedureResultResponse\022h\n\027getSecurityCap" +
-      "abilities\022%.hbase.pb.SecurityCapabilitie" +
-      "sRequest\032&.hbase.pb.SecurityCapabilities" +
-      "Response\022S\n\016AbortProcedure\022\037.hbase.pb.Ab" +
-      "ortProcedureRequest\032 .hbase.pb.AbortProc" +
-      "edureResponse\022S\n\016ListProcedures\022\037.hbase." +
-      "pb.ListProceduresRequest\032 .hbase.pb.List" +
-      "ProceduresResponseBB\n*org.apache.hadoop." +
-      "hbase.protobuf.generatedB\014MasterProtosH\001",
-      "\210\001\001\240\001\001"
+      "\010\"l\n\022SetSwitchesRequest\022\017\n\007enabled\030\001 \002(\010" +
+      "\022\023\n\013synchronous\030\002 \001(\010\0220\n\014switch_types\030\003 " +
+      "\003(\0162\032.hbase.pb.MasterSwitchType\")\n\023SetSw" +
+      "itchesResponse\022\022\n\nprev_value\030\001 \003(\010\"I\n\026Is" +
+      "SwitchEnabledRequest\022/\n\013switch_type\030\001 \002(" +
+      "\0162\032.hbase.pb.MasterSwitchType\"*\n\027IsSwitc" +
+      "hEnabledResponse\022\017\n\007enabled\030\001 \002(\010\"\022\n\020Nor" +
+      "malizeRequest\"+\n\021NormalizeResponse\022\026\n\016no",
+      "rmalizer_ran\030\001 \002(\010\")\n\033SetNormalizerRunni" +
+      "ngRequest\022\n\n\002on\030\001 \002(\010\"=\n\034SetNormalizerRu" +
+      "nningResponse\022\035\n\025prev_normalizer_value\030\001" +
+      " \001(\010\"\034\n\032IsNormalizerEnabledRequest\".\n\033Is" +
+      "NormalizerEnabledResponse\022\017\n\007enabled\030\001 \002" +
+      "(\010\"\027\n\025RunCatalogScanRequest\"-\n\026RunCatalo" +
+      "gScanResponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033En" +
+      "ableCatalogJanitorRequest\022\016\n\006enable\030\001 \002(" +
+      "\010\"2\n\034EnableCatalogJanitorResponse\022\022\n\npre" +
+      "v_value\030\001 \001(\010\" \n\036IsCatalogJanitorEnabled",
+      "Request\"0\n\037IsCatalogJanitorEnabledRespon" +
+      "se\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotRequest\022/\n\010" +
+      "snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescri" +
+      "ption\",\n\020SnapshotResponse\022\030\n\020expected_ti" +
+      "meout\030\001 \002(\003\"\036\n\034GetCompletedSnapshotsRequ" +
+      "est\"Q\n\035GetCompletedSnapshotsResponse\0220\n\t" +
+      "snapshots\030\001 \003(\0132\035.hbase.pb.SnapshotDescr" +
+      "iption\"H\n\025DeleteSnapshotRequest\022/\n\010snaps" +
+      "hot\030\001 \002(\0132\035.hbase.pb.SnapshotDescription" +
+      "\"\030\n\026DeleteSnapshotResponse\"I\n\026RestoreSna",
+      "pshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.p" +
+      "b.SnapshotDescription\"\031\n\027RestoreSnapshot" +
+      "Response\"H\n\025IsSnapshotDoneRequest\022/\n\010sna" +
+      "pshot\030\001 \001(\0132\035.hbase.pb.SnapshotDescripti" +
+      "on\"^\n\026IsSnapshotDoneResponse\022\023\n\004done\030\001 \001" +
+      "(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hbase.pb.S" +
+      "napshotDescription\"O\n\034IsRestoreSnapshotD" +
+      "oneRequest\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb." +
+      "SnapshotDescription\"4\n\035IsRestoreSnapshot" +
+      "DoneResponse\022\023\n\004done\030\001 \001(\010:\005false\"F\n\033Get",
+      "SchemaAlterStatusRequest\022\'\n\ntable_name\030\001" +
+      " \002(\0132\023.hbase.pb.TableName\"T\n\034GetSchemaAl" +
+      "terStatusResponse\022\035\n\025yet_to_update_regio" +
+      "ns\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n\032GetT" +
+      "ableDescriptorsRequest\022(\n\013table_names\030\001 " +
+      "\003(\0132\023.hbase.pb.TableName\022\r\n\005regex\030\002 \001(\t\022" +
+      "!\n\022include_sys_tables\030\003 \001(\010:\005false\022\021\n\tna" +
+      "mespace\030\004 \001(\t\"J\n\033GetTableDescriptorsResp" +
+      "onse\022+\n\014table_schema\030\001 \003(\0132\025.hbase.pb.Ta" +
+      "bleSchema\"[\n\024GetTableNamesRequest\022\r\n\005reg",
+      "ex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(\010:\005fa" +
+      "lse\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTableNamesR" +
+      "esponse\022(\n\013table_names\030\001 \003(\0132\023.hbase.pb." +
+      "TableName\"?\n\024GetTableStateRequest\022\'\n\ntab" +
+      "le_name\030\001 \002(\0132\023.hbase.pb.TableName\"B\n\025Ge" +
+      "tTableStateResponse\022)\n\013table_state\030\001 \002(\013" +
+      "2\024.hbase.pb.TableState\"\031\n\027GetClusterStat" +
+      "usRequest\"K\n\030GetClusterStatusResponse\022/\n" +
+      "\016cluster_status\030\001 \002(\0132\027.hbase.pb.Cluster" +
+      "Status\"\030\n\026IsMasterRunningRequest\"4\n\027IsMa",
+      "sterRunningResponse\022\031\n\021is_master_running" +
+      "\030\001 \002(\010\"I\n\024ExecProcedureRequest\0221\n\tproced" +
+      "ure\030\001 \002(\0132\036.hbase.pb.ProcedureDescriptio" +
+      "n\"F\n\025ExecProcedureResponse\022\030\n\020expected_t" +
+      "imeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsP" +
+      "rocedureDoneRequest\0221\n\tprocedure\030\001 \001(\0132\036" +
+      ".hbase.pb.ProcedureDescription\"`\n\027IsProc" +
+      "edureDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\0220" +
+      "\n\010snapshot\030\002 \001(\0132\036.hbase.pb.ProcedureDes" +
+      "cription\",\n\031GetProcedureResultRequest\022\017\n",
+      "\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedureResultRes" +
+      "ponse\0229\n\005state\030\001 \002(\0162*.hbase.pb.GetProce" +
+      "dureResultResponse.State\022\022\n\nstart_time\030\002" +
+      " \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(" +
+      "\014\0224\n\texception\030\005 \001(\0132!.hbase.pb.ForeignE" +
+      "xceptionMessage\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022" +
+      "\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProce" +
+      "dureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInter" +
+      "ruptIfRunning\030\002 \001(\010:\004true\"6\n\026AbortProced" +
+      "ureResponse\022\034\n\024is_procedure_aborted\030\001 \002(",
+      "\010\"\027\n\025ListProceduresRequest\"@\n\026ListProced" +
+      "uresResponse\022&\n\tprocedure\030\001 \003(\0132\023.hbase." +
+      "pb.Procedure\"\315\001\n\017SetQuotaRequest\022\021\n\tuser" +
+      "_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnames" +
+      "pace\030\003 \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023.hbase.p" +
+      "b.TableName\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypas" +
+      "s_globals\030\006 \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hbas" +
+      "e.pb.ThrottleRequest\"\022\n\020SetQuotaResponse" +
+      "\"J\n\037MajorCompactionTimestampRequest\022\'\n\nt" +
+      "able_name\030\001 \002(\0132\023.hbase.pb.TableName\"U\n(",
+      "MajorCompactionTimestampForRegionRequest" +
+      "\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecif" +
+      "ier\"@\n MajorCompactionTimestampResponse\022" +
+      "\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n\033Securit" +
+      "yCapabilitiesRequest\"\354\001\n\034SecurityCapabil" +
+      "itiesResponse\022G\n\014capabilities\030\001 \003(\01621.hb" +
+      "ase.pb.SecurityCapabilitiesResponse.Capa" +
+      "bility\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTI" +
+      "CATION\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rA" +
+      "UTHORIZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023",
+      "\n\017CELL_VISIBILITY\020\004*(\n\020MasterSwitchType\022" +
+      "\t\n\005SPLIT\020\000\022\t\n\005MERGE\020\0012\240(\n\rMasterService\022" +
+      "e\n\024GetSchemaAlterStatus\022%.hbase.pb.GetSc" +
+      "hemaAlterStatusRequest\032&.hbase.pb.GetSch" +
+      "emaAlterStatusResponse\022b\n\023GetTableDescri" +
+      "ptors\022$.hbase.pb.GetTableDescriptorsRequ" +
+      "est\032%.hbase.pb.GetTableDescriptorsRespon" +
+      "se\022P\n\rGetTableNames\022\036.hbase.pb.GetTableN" +
+      "amesRequest\032\037.hbase.pb.GetTableNamesResp" +
+      "onse\022Y\n\020GetClusterStatus\022!.hbase.pb.GetC",
+      "lusterStatusRequest\032\".hbase.pb.GetCluste" +
+      "rStatusResponse\022V\n\017IsMasterRunning\022 .hba" +
+      "se.pb.IsMasterRunningRequest\032!.hbase.pb." +
+      "IsMasterRunningResponse\022D\n\tAddColumn\022\032.h" +
+      "base.pb.AddColumnRequest\032\033.hbase.pb.AddC" +
+      "olumnResponse\022M\n\014DeleteColumn\022\035.hbase.pb" +
+      ".DeleteColumnRequest\032\036.hbase.pb.DeleteCo" +
+      "lumnResponse\022M\n\014ModifyColumn\022\035.hbase.pb." +
+      "ModifyColumnRequest\032\036.hbase.pb.ModifyCol" +
+      "umnResponse\022G\n\nMoveRegion\022\033.hbase.pb.Mov",
+      "eRegionRequest\032\034.hbase.pb.MoveRegionResp" +
+      "onse\022k\n\026DispatchMergingRegions\022\'.hbase.p" +
+      "b.DispatchMergingRegionsRequest\032(.hbase." +
+      "pb.DispatchMergingRegionsResponse\022M\n\014Ass" +
+      "ignRegion\022\035.hbase.pb.AssignRegionRequest" +
+      "\032\036.hbase.pb.AssignRegionResponse\022S\n\016Unas" +
+      "signRegion\022\037.hbase.pb.UnassignRegionRequ" +
+      "est\032 .hbase.pb.UnassignRegionResponse\022P\n" +
+      "\rOfflineRegion\022\036.hbase.pb.OfflineRegionR" +
+      "equest\032\037.hbase.pb.OfflineRegionResponse\022",
+      "J\n\013DeleteTable\022\034.hbase.pb.DeleteTableReq" +
+      "uest\032\035.hbase.pb.DeleteTableResponse\022P\n\rt" +
+      "runcateTable\022\036.hbase.pb.TruncateTableReq" +
+      "uest\032\037.hbase.pb.TruncateTableResponse\022J\n" +
+      "\013EnableTable\022\034.hbase.pb.EnableTableReque" +
+      "st\032\035.hbase.pb.EnableTableResponse\022M\n\014Dis" +
+      "ableTable\022\035.hbase.pb.DisableTableRequest" +
+      "\032\036.hbase.pb.DisableTableResponse\022J\n\013Modi" +
+      "fyTable\022\034.hbase.pb.ModifyTableRequest\032\035." +
+      "hbase.pb.ModifyTableResponse\022J\n\013CreateTa",
+      "ble\022\034.hbase.pb.CreateTableRequest\032\035.hbas" +
+      "e.pb.CreateTableResponse\022A\n\010Shutdown\022\031.h" +
+      "base.pb.ShutdownRequest\032\032.hbase.pb.Shutd" +
+      "ownResponse\022G\n\nStopMaster\022\033.hbase.pb.Sto" +
+      "pMasterRequest\032\034.hbase.pb.StopMasterResp" +
+      "onse\022>\n\007Balance\022\030.hbase.pb.BalanceReques" +
+      "t\032\031.hbase.pb.BalanceResponse\022_\n\022SetBalan" +
+      "cerRunning\022#.hbase.pb.SetBalancerRunning" +
+      "Request\032$.hbase.pb.SetBalancerRunningRes" +
+      "ponse\022\\\n\021IsBalancerEnabled\022\".hbase.pb.Is",
+      "BalancerEnabledRequest\032#.hbase.pb.IsBala" +
+      "ncerEnabledResponse\022J\n\013SetSwitches\022\034.hba" +
+      "se.pb.SetSwitchesRequest\032\035.hbase.pb.SetS" +
+      "witchesResponse\022V\n\017IsSwitchEnabled\022 .hba" +
+      "se.pb.IsSwitchEnabledRequest\032!.hbase.pb." +
+      "IsSwitchEnabledResponse\022D\n\tNormalize\022\032.h" +
+      "base.pb.NormalizeRequest\032\033.hbase.pb.Norm" +
+      "alizeResponse\022e\n\024SetNormalizerRunning\022%." +
+      "hbase.pb.SetNormalizerRunningRequest\032&.h" +
+      "base.pb.SetNormalizerRunningResponse\022b\n\023",
+      "IsNormalizerEnabled\022$.hbase.pb.IsNormali" +
+      "zerEnabledRequest\032%.hbase.pb.IsNormalize" +
+      "rEnabledResponse\022S\n\016RunCatalogScan\022\037.hba" +
+      "se.pb.RunCatalogScanRequest\032 .hbase.pb.R" +
+      "unCatalogScanResponse\022e\n\024EnableCatalogJa" +
+      "nitor\022%.hbase.pb.EnableCatalogJanitorReq" +
+      "uest\032&.hbase.pb.EnableCatalogJanitorResp" +
+      "onse\022n\n\027IsCatalogJanitorEnabled\022(.hbase." +
+      "pb.IsCatalogJanitorEnabledRequest\032).hbas" +
+      "e.pb.IsCatalogJanitorEnabledResponse\022^\n\021",
+      "ExecMasterService\022#.hbase.pb.Coprocessor" +
+      "ServiceRequest\032$.hbase.pb.CoprocessorSer" +
+      "viceResponse\022A\n\010Snapshot\022\031.hbase.pb.Snap" +
+      "shotRequest\032\032.hbase.pb.SnapshotResponse\022" +
+      "h\n\025GetCompletedSnapshots\022&.hbase.pb.GetC" +
+      "ompletedSnapshotsRequest\032\'.hbase.pb.GetC" +
+      "ompletedSnapshotsResponse\022S\n\016DeleteSnaps" +
+      "hot\022\037.hbase.pb.DeleteSnapshotRequest\032 .h" +
+      "base.pb.DeleteSnapshotResponse\022S\n\016IsSnap" +
+      "shotDone\022\037.hbase.pb.IsSnapshotDoneReques",
+      "t\032 .hbase.pb.IsSnapshotDoneResponse\022V\n\017R" +
+      "estoreSnapshot\022 .hbase.pb.RestoreSnapsho" +
+      "tRequest\032!.hbase.pb.RestoreSnapshotRespo" +
+      "nse\022h\n\025IsRestoreSnapshotDone\022&.hbase.pb." +
+      "IsRestoreSnapshotDoneRequest\032\'.hbase.pb." +
+      "IsRestoreSnapshotDoneResponse\022P\n\rExecPro" +
+      "cedure\022\036.hbase.pb.ExecProcedureRequest\032\037" +
+      ".hbase.pb.ExecProcedureResponse\022W\n\024ExecP" +
+      "rocedureWithRet\022\036.hbase.pb.ExecProcedure" +
+      "Request\032\037.hbase.pb.ExecProcedureResponse",
+      "\022V\n\017IsProcedureDone\022 .hbase.pb.IsProcedu" +
+      "reDoneRequest\032!.hbase.pb.IsProcedureDone" +
+      "Response\022V\n\017ModifyNamespace\022 .hbase.pb.M" +
+      "odifyNamespaceRequest\032!.hbase.pb.ModifyN" +
+      "amespaceResponse\022V\n\017CreateNamespace\022 .hb" +
+      "ase.pb.CreateNamespaceRequest\032!.hbase.pb" +
+      ".CreateNamespaceResponse\022V\n\017DeleteNamesp" +
+      "ace\022 .hbase.pb.DeleteNamespaceRequest\032!." +
+      "hbase.pb.DeleteNamespaceResponse\022k\n\026GetN" +
+      "amespaceDescriptor\022\'.hbase.pb.GetNamespa",
+      "ceDescriptorRequest\032(.hbase.pb.GetNamesp" +
+      "aceDescriptorResponse\022q\n\030ListNamespaceDe" +
+      "scriptors\022).hbase.pb.ListNamespaceDescri" +
+      "ptorsRequest\032*.hbase.pb.ListNamespaceDes" +
+      "criptorsResponse\022\206\001\n\037ListTableDescriptor" +
+      "sByNamespace\0220.hbase.pb.ListTableDescrip" +
+      "torsByNamespaceRequest\0321.hbase.pb.ListTa" +
+      "bleDescriptorsByNamespaceResponse\022t\n\031Lis" +
+      "tTableNamesByNamespace\022*.hbase.pb.ListTa" +
+      "bleNamesByNamespaceRequest\032+.hbase.pb.Li",
+      "stTableNamesByNamespaceResponse\022P\n\rGetTa" +
+      "bleState\022\036.hbase.pb.GetTableStateRequest" +
+      "\032\037.hbase.pb.GetTableStateResponse\022A\n\010Set" +
+      "Quota\022\031.hbase.pb.SetQuotaRequest\032\032.hbase" +
+      ".pb.SetQuotaResponse\022x\n\037getLastMajorComp" +
+      "actionTimestamp\022).hbase.pb.MajorCompacti" +
+      "onTimestampRequest\032*.hbase.pb.MajorCompa" +
+      "ctionTimestampResponse\022\212\001\n(getLastMajorC" +
+      "ompactionTimestampForRegion\0222.hbase.pb.M" +
+      "ajorCompactionTimestampForRegionRequest\032",
+      "*.hbase.pb.MajorCompactionTimestampRespo" +
+      "nse\022_\n\022getProcedureResult\022#.hbase.pb.Get" +
+      "ProcedureResultRequest\032$.hbase.pb.GetPro" +
+      "cedureResultResponse\022h\n\027getSecurityCapab" +
+      "ilities\022%.hbase.pb.SecurityCapabilitiesR" +
+      "equest\032&.hbase.pb.SecurityCapabilitiesRe" +
+      "sponse\022S\n\016AbortProcedure\022\037.hbase.pb.Abor" +
+      "tProcedureRequest\032 .hbase.pb.AbortProced" +
+      "ureResponse\022S\n\016ListProcedures\022\037.hbase.pb" +
+      ".ListProceduresRequest\032 .hbase.pb.ListPr",
+      "oceduresResponseBB\n*org.apache.hadoop.hb" +
+      "ase.protobuf.generatedB\014MasterProtosH\001\210\001" +
+      "\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
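The descriptor bytes above encode the four new switch messages (SetSwitchesRequest, SetSwitchesResponse, IsSwitchEnabledRequest, IsSwitchEnabledResponse), a new top-level MasterSwitchType enum (SPLIT = 0, MERGE = 1), and the two new MasterService RPCs, SetSwitches and IsSwitchEnabled. A minimal client-side sketch, assuming the regenerated builders; field names and labels are decoded from the descriptor bytes, and the snippet is illustrative, not part of the patch:

// Sketch, not part of the patch: build the two new request messages with
// the generated builders. Field names/labels decoded from the descriptor.
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;

public class SwitchRequestSketch {
  public static void main(String[] args) {
    MasterProtos.SetSwitchesRequest setReq = MasterProtos.SetSwitchesRequest.newBuilder()
        .setEnabled(false)                                    // required bool, field 1
        .setSynchronous(true)                                 // optional bool, field 2
        .addSwitchTypes(MasterProtos.MasterSwitchType.SPLIT)  // repeated enum, field 3
        .addSwitchTypes(MasterProtos.MasterSwitchType.MERGE)
        .build();
    MasterProtos.IsSwitchEnabledRequest isReq = MasterProtos.IsSwitchEnabledRequest.newBuilder()
        .setSwitchType(MasterProtos.MasterSwitchType.SPLIT)   // required enum, field 1
        .build();
    System.out.println(setReq.getSwitchTypesCount() + " switch(es) targeted, querying "
        + isReq.getSwitchType());
  }
}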
@@ -62748,320 +65132,344 @@ public final class MasterProtos {
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsBalancerEnabledResponse_descriptor,
               new java.lang.String[] { "Enabled", });
-          internal_static_hbase_pb_NormalizeRequest_descriptor =
+          internal_static_hbase_pb_SetSwitchesRequest_descriptor =
             getDescriptor().getMessageTypes().get(52);
+          internal_static_hbase_pb_SetSwitchesRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SetSwitchesRequest_descriptor,
+              new java.lang.String[] { "Enabled", "Synchronous", "SwitchTypes", });
+          internal_static_hbase_pb_SetSwitchesResponse_descriptor =
+            getDescriptor().getMessageTypes().get(53);
+          internal_static_hbase_pb_SetSwitchesResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SetSwitchesResponse_descriptor,
+              new java.lang.String[] { "PrevValue", });
+          internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor =
+            getDescriptor().getMessageTypes().get(54);
+          internal_static_hbase_pb_IsSwitchEnabledRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_IsSwitchEnabledRequest_descriptor,
+              new java.lang.String[] { "SwitchType", });
+          internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor =
+            getDescriptor().getMessageTypes().get(55);
+          internal_static_hbase_pb_IsSwitchEnabledResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_IsSwitchEnabledResponse_descriptor,
+              new java.lang.String[] { "Enabled", });
+          internal_static_hbase_pb_NormalizeRequest_descriptor =
+            getDescriptor().getMessageTypes().get(56);
           internal_static_hbase_pb_NormalizeRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_NormalizeRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_NormalizeResponse_descriptor =
-            getDescriptor().getMessageTypes().get(53);
+            getDescriptor().getMessageTypes().get(57);
           internal_static_hbase_pb_NormalizeResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_NormalizeResponse_descriptor,
               new java.lang.String[] { "NormalizerRan", });
           internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor =
-            getDescriptor().getMessageTypes().get(54);
+            getDescriptor().getMessageTypes().get(58);
           internal_static_hbase_pb_SetNormalizerRunningRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetNormalizerRunningRequest_descriptor,
               new java.lang.String[] { "On", });
           internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor =
-            getDescriptor().getMessageTypes().get(55);
+            getDescriptor().getMessageTypes().get(59);
           internal_static_hbase_pb_SetNormalizerRunningResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetNormalizerRunningResponse_descriptor,
               new java.lang.String[] { "PrevNormalizerValue", });
           internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor =
-            getDescriptor().getMessageTypes().get(56);
+            getDescriptor().getMessageTypes().get(60);
           internal_static_hbase_pb_IsNormalizerEnabledRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsNormalizerEnabledRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor =
-            getDescriptor().getMessageTypes().get(57);
+            getDescriptor().getMessageTypes().get(61);
           internal_static_hbase_pb_IsNormalizerEnabledResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsNormalizerEnabledResponse_descriptor,
               new java.lang.String[] { "Enabled", });
           internal_static_hbase_pb_RunCatalogScanRequest_descriptor =
-            getDescriptor().getMessageTypes().get(58);
+            getDescriptor().getMessageTypes().get(62);
           internal_static_hbase_pb_RunCatalogScanRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RunCatalogScanRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_RunCatalogScanResponse_descriptor =
-            getDescriptor().getMessageTypes().get(59);
+            getDescriptor().getMessageTypes().get(63);
           internal_static_hbase_pb_RunCatalogScanResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RunCatalogScanResponse_descriptor,
               new java.lang.String[] { "ScanResult", });
           internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor =
-            getDescriptor().getMessageTypes().get(60);
+            getDescriptor().getMessageTypes().get(64);
           internal_static_hbase_pb_EnableCatalogJanitorRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_EnableCatalogJanitorRequest_descriptor,
               new java.lang.String[] { "Enable", });
           internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor =
-            getDescriptor().getMessageTypes().get(61);
+            getDescriptor().getMessageTypes().get(65);
           internal_static_hbase_pb_EnableCatalogJanitorResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_EnableCatalogJanitorResponse_descriptor,
               new java.lang.String[] { "PrevValue", });
           internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor =
-            getDescriptor().getMessageTypes().get(62);
+            getDescriptor().getMessageTypes().get(66);
           internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsCatalogJanitorEnabledRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor =
-            getDescriptor().getMessageTypes().get(63);
+            getDescriptor().getMessageTypes().get(67);
           internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor,
               new java.lang.String[] { "Value", });
           internal_static_hbase_pb_SnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(64);
+            getDescriptor().getMessageTypes().get(68);
           internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_SnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(65);
+            getDescriptor().getMessageTypes().get(69);
           internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SnapshotResponse_descriptor,
               new java.lang.String[] { "ExpectedTimeout", });
           internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor =
-            getDescriptor().getMessageTypes().get(66);
+            getDescriptor().getMessageTypes().get(70);
           internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor =
-            getDescriptor().getMessageTypes().get(67);
+            getDescriptor().getMessageTypes().get(71);
           internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor,
               new java.lang.String[] { "Snapshots", });
           internal_static_hbase_pb_DeleteSnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(68);
+            getDescriptor().getMessageTypes().get(72);
           internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_DeleteSnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_DeleteSnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(69);
+            getDescriptor().getMessageTypes().get(73);
           internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_DeleteSnapshotResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_RestoreSnapshotRequest_descriptor =
-            getDescriptor().getMessageTypes().get(70);
+            getDescriptor().getMessageTypes().get(74);
           internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RestoreSnapshotRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_RestoreSnapshotResponse_descriptor =
-            getDescriptor().getMessageTypes().get(71);
+            getDescriptor().getMessageTypes().get(75);
           internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_RestoreSnapshotResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(72);
+            getDescriptor().getMessageTypes().get(76);
           internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(73);
+            getDescriptor().getMessageTypes().get(77);
           internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor,
               new java.lang.String[] { "Done", "Snapshot", });
           internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(74);
+            getDescriptor().getMessageTypes().get(78);
           internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor,
               new java.lang.String[] { "Snapshot", });
           internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(75);
+            getDescriptor().getMessageTypes().get(79);
           internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor,
               new java.lang.String[] { "Done", });
           internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor =
-            getDescriptor().getMessageTypes().get(76);
+            getDescriptor().getMessageTypes().get(80);
           internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor =
-            getDescriptor().getMessageTypes().get(77);
+            getDescriptor().getMessageTypes().get(81);
           internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor,
               new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", });
           internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor =
-            getDescriptor().getMessageTypes().get(78);
+            getDescriptor().getMessageTypes().get(82);
           internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor,
               new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", });
           internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor =
-            getDescriptor().getMessageTypes().get(79);
+            getDescriptor().getMessageTypes().get(83);
           internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor,
               new java.lang.String[] { "TableSchema", });
           internal_static_hbase_pb_GetTableNamesRequest_descriptor =
-            getDescriptor().getMessageTypes().get(80);
+            getDescriptor().getMessageTypes().get(84);
           internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableNamesRequest_descriptor,
               new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", });
           internal_static_hbase_pb_GetTableNamesResponse_descriptor =
-            getDescriptor().getMessageTypes().get(81);
+            getDescriptor().getMessageTypes().get(85);
           internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableNamesResponse_descriptor,
               new java.lang.String[] { "TableNames", });
           internal_static_hbase_pb_GetTableStateRequest_descriptor =
-            getDescriptor().getMessageTypes().get(82);
+            getDescriptor().getMessageTypes().get(86);
           internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableStateRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_GetTableStateResponse_descriptor =
-            getDescriptor().getMessageTypes().get(83);
+            getDescriptor().getMessageTypes().get(87);
           internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetTableStateResponse_descriptor,
               new java.lang.String[] { "TableState", });
           internal_static_hbase_pb_GetClusterStatusRequest_descriptor =
-            getDescriptor().getMessageTypes().get(84);
+            getDescriptor().getMessageTypes().get(88);
           internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetClusterStatusRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_GetClusterStatusResponse_descriptor =
-            getDescriptor().getMessageTypes().get(85);
+            getDescriptor().getMessageTypes().get(89);
           internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetClusterStatusResponse_descriptor,
               new java.lang.String[] { "ClusterStatus", });
           internal_static_hbase_pb_IsMasterRunningRequest_descriptor =
-            getDescriptor().getMessageTypes().get(86);
+            getDescriptor().getMessageTypes().get(90);
           internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsMasterRunningRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_IsMasterRunningResponse_descriptor =
-            getDescriptor().getMessageTypes().get(87);
+            getDescriptor().getMessageTypes().get(91);
           internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsMasterRunningResponse_descriptor,
               new java.lang.String[] { "IsMasterRunning", });
           internal_static_hbase_pb_ExecProcedureRequest_descriptor =
-            getDescriptor().getMessageTypes().get(88);
+            getDescriptor().getMessageTypes().get(92);
           internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ExecProcedureRequest_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_ExecProcedureResponse_descriptor =
-            getDescriptor().getMessageTypes().get(89);
+            getDescriptor().getMessageTypes().get(93);
           internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ExecProcedureResponse_descriptor,
               new java.lang.String[] { "ExpectedTimeout", "ReturnData", });
           internal_static_hbase_pb_IsProcedureDoneRequest_descriptor =
-            getDescriptor().getMessageTypes().get(90);
+            getDescriptor().getMessageTypes().get(94);
           internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsProcedureDoneRequest_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_IsProcedureDoneResponse_descriptor =
-            getDescriptor().getMessageTypes().get(91);
+            getDescriptor().getMessageTypes().get(95);
           internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_IsProcedureDoneResponse_descriptor,
               new java.lang.String[] { "Done", "Snapshot", });
           internal_static_hbase_pb_GetProcedureResultRequest_descriptor =
-            getDescriptor().getMessageTypes().get(92);
+            getDescriptor().getMessageTypes().get(96);
           internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetProcedureResultRequest_descriptor,
               new java.lang.String[] { "ProcId", });
           internal_static_hbase_pb_GetProcedureResultResponse_descriptor =
-            getDescriptor().getMessageTypes().get(93);
+            getDescriptor().getMessageTypes().get(97);
           internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_GetProcedureResultResponse_descriptor,
               new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", });
           internal_static_hbase_pb_AbortProcedureRequest_descriptor =
-            getDescriptor().getMessageTypes().get(94);
+            getDescriptor().getMessageTypes().get(98);
           internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_AbortProcedureRequest_descriptor,
               new java.lang.String[] { "ProcId", "MayInterruptIfRunning", });
           internal_static_hbase_pb_AbortProcedureResponse_descriptor =
-            getDescriptor().getMessageTypes().get(95);
+            getDescriptor().getMessageTypes().get(99);
           internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_AbortProcedureResponse_descriptor,
               new java.lang.String[] { "IsProcedureAborted", });
           internal_static_hbase_pb_ListProceduresRequest_descriptor =
-            getDescriptor().getMessageTypes().get(96);
+            getDescriptor().getMessageTypes().get(100);
           internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ListProceduresRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_ListProceduresResponse_descriptor =
-            getDescriptor().getMessageTypes().get(97);
+            getDescriptor().getMessageTypes().get(101);
           internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_ListProceduresResponse_descriptor,
               new java.lang.String[] { "Procedure", });
           internal_static_hbase_pb_SetQuotaRequest_descriptor =
-            getDescriptor().getMessageTypes().get(98);
+            getDescriptor().getMessageTypes().get(102);
           internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetQuotaRequest_descriptor,
               new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", });
           internal_static_hbase_pb_SetQuotaResponse_descriptor =
-            getDescriptor().getMessageTypes().get(99);
+            getDescriptor().getMessageTypes().get(103);
           internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SetQuotaResponse_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor =
-            getDescriptor().getMessageTypes().get(100);
+            getDescriptor().getMessageTypes().get(104);
           internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor,
               new java.lang.String[] { "TableName", });
           internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor =
-            getDescriptor().getMessageTypes().get(101);
+            getDescriptor().getMessageTypes().get(105);
           internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor,
               new java.lang.String[] { "Region", });
           internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor =
-            getDescriptor().getMessageTypes().get(102);
+            getDescriptor().getMessageTypes().get(106);
           internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor,
               new java.lang.String[] { "CompactionTimestamp", });
           internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor =
-            getDescriptor().getMessageTypes().get(103);
+            getDescriptor().getMessageTypes().get(107);
           internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor,
               new java.lang.String[] { });
           internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor =
-            getDescriptor().getMessageTypes().get(104);
+            getDescriptor().getMessageTypes().get(108);
           internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor,
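Message-type descriptor lookups are positional, which is why every getMessageTypes().get(n) index in the hunk above moves up by four once the switch messages occupy slots 52-55. A small sketch of that invariant, assuming the regenerated MasterProtos class:

// Sketch, not part of the patch: the four switch messages take descriptor
// slots 52-55, pushing NormalizeRequest (formerly slot 52) to slot 56.
com.google.protobuf.Descriptors.FileDescriptor fd =
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor();
assert fd.getMessageTypes().get(52).getName().equals("SetSwitchesRequest");
assert fd.getMessageTypes().get(56).getName().equals("NormalizeRequest");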
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 8dbb5ad..9805d50 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -11,13 +11,13 @@ public final class SnapshotProtos {
   public interface SnapshotFileInfoOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required .SnapshotFileInfo.Type type = 1;
+    // required .hbase.pb.SnapshotFileInfo.Type type = 1;
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     boolean hasType();
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType();
 
@@ -67,7 +67,7 @@ public final class SnapshotProtos {
         getWalNameBytes();
   }
   /**
-   * Protobuf type {@code SnapshotFileInfo}
+   * Protobuf type {@code hbase.pb.SnapshotFileInfo}
    */
   public static final class SnapshotFileInfo extends
       com.google.protobuf.GeneratedMessage
@@ -157,12 +157,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
     }
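The SnapshotProtos.java churn in this patch appears to be pure regeneration fallout from Snapshot.proto joining the hbase.pb package: descriptor full names and the internal_static_* identifiers gain the hbase_pb/hbase.pb prefix, while the generated Java API is unchanged. A quick sketch of the only observable difference, assuming the regenerated class:

// Sketch, not part of the patch: only the descriptor names change.
com.google.protobuf.Descriptors.Descriptor d =
    org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.getDescriptor();
assert d.getFullName().equals("hbase.pb.SnapshotFileInfo");  // previously "SnapshotFileInfo"
assert d.getName().equals("SnapshotFileInfo");               // simple name is unchanged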
@@ -183,7 +183,7 @@ public final class SnapshotProtos {
     }
 
     /**
-     * Protobuf enum {@code SnapshotFileInfo.Type}
+     * Protobuf enum {@code hbase.pb.SnapshotFileInfo.Type}
      */
     public enum Type
         implements com.google.protobuf.ProtocolMessageEnum {
@@ -261,21 +261,21 @@ public final class SnapshotProtos {
         this.value = value;
       }
 
-      // @@protoc_insertion_point(enum_scope:SnapshotFileInfo.Type)
+      // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotFileInfo.Type)
     }
 
     private int bitField0_;
-    // required .SnapshotFileInfo.Type type = 1;
+    // required .hbase.pb.SnapshotFileInfo.Type type = 1;
     public static final int TYPE_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_;
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     public boolean hasType() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * required .SnapshotFileInfo.Type type = 1;
+     * required .hbase.pb.SnapshotFileInfo.Type type = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() {
       return type_;
@@ -613,19 +613,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotFileInfo}
+     * Protobuf type {@code hbase.pb.SnapshotFileInfo}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfoOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Builder.class);
       }
@@ -667,7 +667,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotFileInfo_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotFileInfo_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo getDefaultInstanceForType() {
@@ -767,22 +767,22 @@ public final class SnapshotProtos {
       }
       private int bitField0_;
 
-      // required .SnapshotFileInfo.Type type = 1;
+      // required .hbase.pb.SnapshotFileInfo.Type type = 1;
       private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type.HFILE;
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public boolean hasType() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type getType() {
         return type_;
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public Builder setType(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo.Type value) {
         if (value == null) {
@@ -794,7 +794,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .SnapshotFileInfo.Type type = 1;
+       * required .hbase.pb.SnapshotFileInfo.Type type = 1;
        */
       public Builder clearType() {
         bitField0_ = (bitField0_ & ~0x00000001);
@@ -1025,7 +1025,7 @@ public final class SnapshotProtos {
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotFileInfo)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotFileInfo)
     }
 
     static {
@@ -1033,7 +1033,7 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotFileInfo)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotFileInfo)
   }
 
   public interface SnapshotRegionManifestOrBuilder
@@ -1049,47 +1049,47 @@ public final class SnapshotProtos {
      */
     int getVersion();
 
-    // required .RegionInfo region_info = 2;
+    // required .hbase.pb.RegionInfo region_info = 2;
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     boolean hasRegionInfo();
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
 
-    // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+    // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> 
         getFamilyFilesList();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index);
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     int getFamilyFilesCount();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
         getFamilyFilesOrBuilderList();
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
         int index);
   }
   /**
-   * Protobuf type {@code SnapshotRegionManifest}
+   * Protobuf type {@code hbase.pb.SnapshotRegionManifest}
    */
   public static final class SnapshotRegionManifest extends
       com.google.protobuf.GeneratedMessage
@@ -1182,12 +1182,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder.class);
     }
@@ -1225,17 +1225,17 @@ public final class SnapshotProtos {
       com.google.protobuf.ByteString
           getNameBytes();
 
-      // optional .Reference reference = 2;
+      // optional .hbase.pb.Reference reference = 2;
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       boolean hasReference();
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference();
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder();
 
@@ -1258,7 +1258,7 @@ public final class SnapshotProtos {
       long getFileSize();
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest.StoreFile}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile}
      */
     public static final class StoreFile extends
         com.google.protobuf.GeneratedMessage
@@ -1345,12 +1345,12 @@ public final class SnapshotProtos {
       }
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder.class);
       }
@@ -1414,23 +1414,23 @@ public final class SnapshotProtos {
         }
       }
 
-      // optional .Reference reference = 2;
+      // optional .hbase.pb.Reference reference = 2;
       public static final int REFERENCE_FIELD_NUMBER = 2;
       private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference reference_;
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public boolean hasReference() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference() {
         return reference_;
       }
       /**
-       * optional .Reference reference = 2;
+       * optional .hbase.pb.Reference reference = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder() {
         return reference_;
@@ -1652,19 +1652,19 @@ public final class SnapshotProtos {
         return builder;
       }
       /**
-       * Protobuf type {@code SnapshotRegionManifest.StoreFile}
+       * Protobuf type {@code hbase.pb.SnapshotRegionManifest.StoreFile}
        */
       public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder {
         public static final com.google.protobuf.Descriptors.Descriptor
             getDescriptor() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
         }
 
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable
               .ensureFieldAccessorsInitialized(
                   org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder.class);
         }
@@ -1709,7 +1709,7 @@ public final class SnapshotProtos {
 
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
         }
 
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getDefaultInstanceForType() {
@@ -1882,18 +1882,18 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // optional .Reference reference = 2;
+        // optional .hbase.pb.Reference reference = 2;
         private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference reference_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDefaultInstance();
         private com.google.protobuf.SingleFieldBuilder<
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder> referenceBuilder_;
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public boolean hasReference() {
           return ((bitField0_ & 0x00000002) == 0x00000002);
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getReference() {
           if (referenceBuilder_ == null) {
@@ -1903,7 +1903,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder setReference(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference value) {
           if (referenceBuilder_ == null) {
@@ -1919,7 +1919,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder setReference(
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder builderForValue) {
@@ -1933,7 +1933,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder mergeReference(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference value) {
           if (referenceBuilder_ == null) {
@@ -1952,7 +1952,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public Builder clearReference() {
           if (referenceBuilder_ == null) {
@@ -1965,7 +1965,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder getReferenceBuilder() {
           bitField0_ |= 0x00000002;
@@ -1973,7 +1973,7 @@ public final class SnapshotProtos {
           return getReferenceFieldBuilder().getBuilder();
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder getReferenceOrBuilder() {
           if (referenceBuilder_ != null) {
@@ -1983,7 +1983,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * optional .Reference reference = 2;
+         * optional .hbase.pb.Reference reference = 2;
          */
         private com.google.protobuf.SingleFieldBuilder<
             org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder> 
@@ -2048,7 +2048,7 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest.StoreFile)
+        // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.StoreFile)
       }
 
       static {
@@ -2056,7 +2056,7 @@ public final class SnapshotProtos {
         defaultInstance.initFields();
       }
 
-      // @@protoc_insertion_point(class_scope:SnapshotRegionManifest.StoreFile)
+      // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.StoreFile)
     }
 
     public interface FamilyFilesOrBuilder
@@ -2072,33 +2072,33 @@ public final class SnapshotProtos {
        */
       com.google.protobuf.ByteString getFamilyName();
 
-      // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> 
           getStoreFilesList();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index);
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       int getStoreFilesCount();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
           getStoreFilesOrBuilderList();
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
           int index);
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest.FamilyFiles}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles}
      */
     public static final class FamilyFiles extends
         com.google.protobuf.GeneratedMessage
@@ -2178,12 +2178,12 @@ public final class SnapshotProtos {
       }
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder.class);
       }
@@ -2220,36 +2220,36 @@ public final class SnapshotProtos {
         return familyName_;
       }
 
-      // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
       public static final int STORE_FILES_FIELD_NUMBER = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> storeFiles_;
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> getStoreFilesList() {
         return storeFiles_;
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
           getStoreFilesOrBuilderList() {
         return storeFiles_;
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public int getStoreFilesCount() {
         return storeFiles_.size();
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index) {
         return storeFiles_.get(index);
       }
       /**
-       * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
           int index) {
@@ -2428,19 +2428,19 @@ public final class SnapshotProtos {
         return builder;
       }
       /**
-       * Protobuf type {@code SnapshotRegionManifest.FamilyFiles}
+       * Protobuf type {@code hbase.pb.SnapshotRegionManifest.FamilyFiles}
        */
       public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder>
          implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder {
         public static final com.google.protobuf.Descriptors.Descriptor
             getDescriptor() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
         }
 
         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
             internalGetFieldAccessorTable() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable
               .ensureFieldAccessorsInitialized(
                   org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder.class);
         }
@@ -2483,7 +2483,7 @@ public final class SnapshotProtos {
 
         public com.google.protobuf.Descriptors.Descriptor
             getDescriptorForType() {
-          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+          return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
         }
 
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getDefaultInstanceForType() {
@@ -2633,7 +2633,7 @@ public final class SnapshotProtos {
           return this;
         }
 
-        // repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+        // repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
        private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> storeFiles_ =
           java.util.Collections.emptyList();
         private void ensureStoreFilesIsMutable() {
@@ -2647,7 +2647,7 @@ public final class SnapshotProtos {
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> storeFilesBuilder_;
 
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> getStoreFilesList() {
           if (storeFilesBuilder_ == null) {
@@ -2657,7 +2657,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public int getStoreFilesCount() {
           if (storeFilesBuilder_ == null) {
@@ -2667,7 +2667,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile getStoreFiles(int index) {
           if (storeFilesBuilder_ == null) {
@@ -2677,7 +2677,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder setStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
@@ -2694,7 +2694,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder setStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2708,7 +2708,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
           if (storeFilesBuilder_ == null) {
@@ -2724,7 +2724,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile value) {
@@ -2741,7 +2741,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2755,7 +2755,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addStoreFiles(
             int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder builderForValue) {
@@ -2769,7 +2769,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder addAllStoreFiles(
            java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile> values) {
@@ -2783,7 +2783,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder clearStoreFiles() {
           if (storeFilesBuilder_ == null) {
@@ -2796,7 +2796,7 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public Builder removeStoreFiles(int index) {
           if (storeFilesBuilder_ == null) {
@@ -2809,14 +2809,14 @@ public final class SnapshotProtos {
           return this;
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder getStoreFilesBuilder(
             int index) {
           return getStoreFilesFieldBuilder().getBuilder(index);
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder getStoreFilesOrBuilder(
             int index) {
@@ -2826,7 +2826,7 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFileOrBuilder> 
              getStoreFilesOrBuilderList() {
@@ -2837,14 +2837,14 @@ public final class SnapshotProtos {
           }
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder addStoreFilesBuilder() {
           return getStoreFilesFieldBuilder().addBuilder(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance());
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
         public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder addStoreFilesBuilder(
             int index) {
@@ -2852,7 +2852,7 @@ public final class SnapshotProtos {
               index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance());
         }
         /**
-         * repeated .SnapshotRegionManifest.StoreFile store_files = 2;
+         * repeated .hbase.pb.SnapshotRegionManifest.StoreFile store_files = 2;
          */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.Builder> 
              getStoreFilesBuilderList() {
@@ -2873,7 +2873,7 @@ public final class SnapshotProtos {
           return storeFilesBuilder_;
         }
 
-        // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest.FamilyFiles)
+        // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles)
       }
 
       static {
@@ -2881,7 +2881,7 @@ public final class SnapshotProtos {
         defaultInstance.initFields();
       }
 
-      // @@protoc_insertion_point(class_scope:SnapshotRegionManifest.FamilyFiles)
+      // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest.FamilyFiles)
     }
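
Taken together, StoreFile and FamilyFiles above expose the usual generated builder API. A hedged usage sketch; the file name, size, and column family below are illustrative values, not taken from this patch:

    SnapshotProtos.SnapshotRegionManifest.StoreFile storeFile =
        SnapshotProtos.SnapshotRegionManifest.StoreFile.newBuilder()
            .setName("hfile-0001")            // illustrative store file name
            .setFileSize(1024L)               // illustrative size in bytes
            .build();
    SnapshotProtos.SnapshotRegionManifest.FamilyFiles familyFiles =
        SnapshotProtos.SnapshotRegionManifest.FamilyFiles.newBuilder()
            .setFamilyName(com.google.protobuf.ByteString.copyFromUtf8("cf"))
            .addStoreFiles(storeFile)         // repeated field 2
            .build();
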
 
     private int bitField0_;
@@ -2901,58 +2901,58 @@ public final class SnapshotProtos {
       return version_;
     }
 
-    // required .RegionInfo region_info = 2;
+    // required .hbase.pb.RegionInfo region_info = 2;
     public static final int REGION_INFO_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public boolean hasRegionInfo() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
       return regionInfo_;
     }
     /**
-     * required .RegionInfo region_info = 2;
+     * required .hbase.pb.RegionInfo region_info = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
       return regionInfo_;
     }
 
-    // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+    // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
     public static final int FAMILY_FILES_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> familyFiles_;
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> getFamilyFilesList() {
       return familyFiles_;
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
         getFamilyFilesOrBuilderList() {
       return familyFiles_;
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public int getFamilyFilesCount() {
       return familyFiles_.size();
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index) {
       return familyFiles_.get(index);
     }
     /**
-     * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+     * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
         int index) {
@@ -3152,19 +3152,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotRegionManifest}
+     * Protobuf type {@code hbase.pb.SnapshotRegionManifest}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder.class);
       }
@@ -3214,7 +3214,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotRegionManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getDefaultInstanceForType() {
@@ -3376,18 +3376,18 @@ public final class SnapshotProtos {
         return this;
       }
 
-      // required .RegionInfo region_info = 2;
+      // required .hbase.pb.RegionInfo region_info = 2;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public boolean hasRegionInfo() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
         if (regionInfoBuilder_ == null) {
@@ -3397,7 +3397,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
@@ -3413,7 +3413,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder setRegionInfo(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
@@ -3427,7 +3427,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
@@ -3446,7 +3446,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public Builder clearRegionInfo() {
         if (regionInfoBuilder_ == null) {
@@ -3459,7 +3459,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
         bitField0_ |= 0x00000002;
@@ -3467,7 +3467,7 @@ public final class SnapshotProtos {
         return getRegionInfoFieldBuilder().getBuilder();
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
         if (regionInfoBuilder_ != null) {
@@ -3477,7 +3477,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .RegionInfo region_info = 2;
+       * required .hbase.pb.RegionInfo region_info = 2;
        */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
@@ -3493,7 +3493,7 @@ public final class SnapshotProtos {
         return regionInfoBuilder_;
       }
 
-      // repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+      // repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> familyFiles_ =
         java.util.Collections.emptyList();
       private void ensureFamilyFilesIsMutable() {
@@ -3507,7 +3507,7 @@ public final class SnapshotProtos {
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> familyFilesBuilder_;
 
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> getFamilyFilesList() {
         if (familyFilesBuilder_ == null) {
@@ -3517,7 +3517,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public int getFamilyFilesCount() {
         if (familyFilesBuilder_ == null) {
@@ -3527,7 +3527,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles getFamilyFiles(int index) {
         if (familyFilesBuilder_ == null) {
@@ -3537,7 +3537,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder setFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
@@ -3554,7 +3554,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder setFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3568,7 +3568,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
         if (familyFilesBuilder_ == null) {
@@ -3584,7 +3584,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles value) {
@@ -3601,7 +3601,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3615,7 +3615,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addFamilyFiles(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder builderForValue) {
@@ -3629,7 +3629,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder addAllFamilyFiles(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles> values) {
@@ -3643,7 +3643,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder clearFamilyFiles() {
         if (familyFilesBuilder_ == null) {
@@ -3656,7 +3656,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public Builder removeFamilyFiles(int index) {
         if (familyFilesBuilder_ == null) {
@@ -3669,14 +3669,14 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder getFamilyFilesBuilder(
           int index) {
         return getFamilyFilesFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder getFamilyFilesOrBuilder(
           int index) {
@@ -3686,7 +3686,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFilesOrBuilder> 
            getFamilyFilesOrBuilderList() {
@@ -3697,14 +3697,14 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder addFamilyFilesBuilder() {
         return getFamilyFilesFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder addFamilyFilesBuilder(
           int index) {
@@ -3712,7 +3712,7 @@ public final class SnapshotProtos {
             index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest.FamilyFiles family_files = 3;
+       * repeated .hbase.pb.SnapshotRegionManifest.FamilyFiles family_files = 3;
        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles.Builder> 
            getFamilyFilesBuilderList() {
@@ -3733,7 +3733,7 @@ public final class SnapshotProtos {
         return familyFilesBuilder_;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotRegionManifest)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRegionManifest)
     }
 
     static {
@@ -3741,53 +3741,53 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotRegionManifest)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRegionManifest)
   }
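
A SnapshotRegionManifest then ties one region to its per-family file lists. Continuing the sketch above, and assuming an already-built HBaseProtos.RegionInfo (its construction lives outside this file):

    SnapshotProtos.SnapshotRegionManifest manifest =
        SnapshotProtos.SnapshotRegionManifest.newBuilder()
            .setRegionInfo(regionInfo)        // required field 2, assumed pre-built
            .addFamilyFiles(familyFiles)      // repeated field 3
            .build();
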
 
   public interface SnapshotDataManifestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required .TableSchema table_schema = 1;
+    // required .hbase.pb.TableSchema table_schema = 1;
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     boolean hasTableSchema();
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema();
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder();
 
-    // repeated .SnapshotRegionManifest region_manifests = 2;
+    // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> 
         getRegionManifestsList();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index);
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     int getRegionManifestsCount();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> 
         getRegionManifestsOrBuilderList();
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
         int index);
   }
   /**
-   * Protobuf type {@code SnapshotDataManifest}
+   * Protobuf type {@code hbase.pb.SnapshotDataManifest}
    */
   public static final class SnapshotDataManifest extends
       com.google.protobuf.GeneratedMessage
@@ -3875,12 +3875,12 @@ public final class SnapshotProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.Builder.class);
     }
@@ -3901,58 +3901,58 @@ public final class SnapshotProtos {
     }
 
     private int bitField0_;
-    // required .TableSchema table_schema = 1;
+    // required .hbase.pb.TableSchema table_schema = 1;
     public static final int TABLE_SCHEMA_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public boolean hasTableSchema() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
       return tableSchema_;
     }
     /**
-     * required .TableSchema table_schema = 1;
+     * required .hbase.pb.TableSchema table_schema = 1;
      */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
       return tableSchema_;
     }
 
-    // repeated .SnapshotRegionManifest region_manifests = 2;
+    // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
     public static final int REGION_MANIFESTS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> regionManifests_;
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> getRegionManifestsList() {
       return regionManifests_;
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> 
         getRegionManifestsOrBuilderList() {
       return regionManifests_;
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public int getRegionManifestsCount() {
       return regionManifests_.size();
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index) {
       return regionManifests_.get(index);
     }
     /**
-     * repeated .SnapshotRegionManifest region_manifests = 2;
+     * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
      */
     public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
         int index) {
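
SnapshotDataManifest is the top-level container pairing the table schema with every region manifest. Continuing the same hedged sketch, with tableSchema assumed to be an already-built HBaseProtos.TableSchema:

    SnapshotProtos.SnapshotDataManifest dataManifest =
        SnapshotProtos.SnapshotDataManifest.newBuilder()
            .setTableSchema(tableSchema)      // required field 1, assumed pre-built
            .addRegionManifests(manifest)     // repeated field 2
            .build();
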
@@ -4135,19 +4135,19 @@ public final class SnapshotProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code SnapshotDataManifest}
+     * Protobuf type {@code hbase.pb.SnapshotDataManifest}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
                 org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest.Builder.class);
       }
@@ -4195,7 +4195,7 @@ public final class SnapshotProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_SnapshotDataManifest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDataManifest_descriptor;
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest getDefaultInstanceForType() {
@@ -4317,18 +4317,18 @@ public final class SnapshotProtos {
       }
       private int bitField0_;
 
-      // required .TableSchema table_schema = 1;
+      // required .hbase.pb.TableSchema table_schema = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public boolean hasTableSchema() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
         if (tableSchemaBuilder_ == null) {
@@ -4338,7 +4338,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
         if (tableSchemaBuilder_ == null) {
@@ -4354,7 +4354,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder setTableSchema(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
@@ -4368,7 +4368,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
         if (tableSchemaBuilder_ == null) {
@@ -4387,7 +4387,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public Builder clearTableSchema() {
         if (tableSchemaBuilder_ == null) {
@@ -4400,7 +4400,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() {
         bitField0_ |= 0x00000001;
@@ -4408,7 +4408,7 @@ public final class SnapshotProtos {
         return getTableSchemaFieldBuilder().getBuilder();
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
         if (tableSchemaBuilder_ != null) {
@@ -4418,7 +4418,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * required .TableSchema table_schema = 1;
+       * required .hbase.pb.TableSchema table_schema = 1;
        */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> 
@@ -4434,7 +4434,7 @@ public final class SnapshotProtos {
         return tableSchemaBuilder_;
       }
 
-      // repeated .SnapshotRegionManifest region_manifests = 2;
+      // repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> regionManifests_ =
         java.util.Collections.emptyList();
       private void ensureRegionManifestsIsMutable() {
@@ -4448,7 +4448,7 @@ public final class SnapshotProtos {
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> regionManifestsBuilder_;
 
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> getRegionManifestsList() {
         if (regionManifestsBuilder_ == null) {
@@ -4458,7 +4458,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public int getRegionManifestsCount() {
         if (regionManifestsBuilder_ == null) {
@@ -4468,7 +4468,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest getRegionManifests(int index) {
         if (regionManifestsBuilder_ == null) {
@@ -4478,7 +4478,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder setRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
@@ -4495,7 +4495,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder setRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4509,7 +4509,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
         if (regionManifestsBuilder_ == null) {
@@ -4525,7 +4525,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest value) {
@@ -4542,7 +4542,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4556,7 +4556,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addRegionManifests(
           int index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder builderForValue) {
@@ -4570,7 +4570,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder addAllRegionManifests(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest> values) {
@@ -4584,7 +4584,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder clearRegionManifests() {
         if (regionManifestsBuilder_ == null) {
@@ -4597,7 +4597,7 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public Builder removeRegionManifests(int index) {
         if (regionManifestsBuilder_ == null) {
@@ -4610,14 +4610,14 @@ public final class SnapshotProtos {
         return this;
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder getRegionManifestsBuilder(
           int index) {
         return getRegionManifestsFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder getRegionManifestsOrBuilder(
           int index) {
@@ -4627,7 +4627,7 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifestOrBuilder> 
            getRegionManifestsOrBuilderList() {
@@ -4638,14 +4638,14 @@ public final class SnapshotProtos {
         }
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder addRegionManifestsBuilder() {
         return getRegionManifestsFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder addRegionManifestsBuilder(
           int index) {
@@ -4653,7 +4653,7 @@ public final class SnapshotProtos {
             index, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.getDefaultInstance());
       }
       /**
-       * repeated .SnapshotRegionManifest region_manifests = 2;
+       * repeated .hbase.pb.SnapshotRegionManifest region_manifests = 2;
        */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.Builder> 
            getRegionManifestsBuilderList() {
@@ -4674,7 +4674,7 @@ public final class SnapshotProtos {
         return regionManifestsBuilder_;
       }
 
-      // @@protoc_insertion_point(builder_scope:SnapshotDataManifest)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDataManifest)
     }
 
     static {
@@ -4682,34 +4682,34 @@ public final class SnapshotProtos {
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:SnapshotDataManifest)
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDataManifest)
   }
 
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotFileInfo_descriptor;
+    internal_static_hbase_pb_SnapshotFileInfo_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotFileInfo_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_StoreFile_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotRegionManifest_FamilyFiles_descriptor;
+    internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_SnapshotDataManifest_descriptor;
+    internal_static_hbase_pb_SnapshotDataManifest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_SnapshotDataManifest_fieldAccessorTable;
+      internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -4719,58 +4719,60 @@ public final class SnapshotProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\016Snapshot.proto\032\010FS.proto\032\013HBase.proto\"" +
-      "\211\001\n\020SnapshotFileInfo\022$\n\004type\030\001 \002(\0162\026.Sna" +
-      "pshotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal" +
-      "_server\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022" +
-      "\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\257\002\n\026SnapshotRegionMa" +
-      "nifest\022\017\n\007version\030\001 \001(\005\022 \n\013region_info\030\002" +
-      " \002(\0132\013.RegionInfo\0229\n\014family_files\030\003 \003(\0132" +
-      "#.SnapshotRegionManifest.FamilyFiles\032K\n\t" +
-      "StoreFile\022\014\n\004name\030\001 \002(\t\022\035\n\treference\030\002 \001" +
-      "(\0132\n.Reference\022\021\n\tfile_size\030\003 \001(\004\032Z\n\013Fam",
-      "ilyFiles\022\023\n\013family_name\030\001 \002(\014\0226\n\013store_f" +
-      "iles\030\002 \003(\0132!.SnapshotRegionManifest.Stor" +
-      "eFile\"m\n\024SnapshotDataManifest\022\"\n\014table_s" +
-      "chema\030\001 \002(\0132\014.TableSchema\0221\n\020region_mani" +
-      "fests\030\002 \003(\0132\027.SnapshotRegionManifestBD\n*" +
-      "org.apache.hadoop.hbase.protobuf.generat" +
-      "edB\016SnapshotProtosH\001\210\001\001\240\001\001"
+      "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" +
+      "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" +
+      " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" +
+      "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" +
+      "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" +
+      "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" +
+      "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" +
+      "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" +
+      "napshotRegionManifest.FamilyFiles\032T\n\tSto" +
+      "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132",
+      "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" +
+      "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" +
+      "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" +
+      "onManifest.StoreFile\"\177\n\024SnapshotDataMani" +
+      "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" +
+      "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" +
+      "ase.pb.SnapshotRegionManifestBD\n*org.apa" +
+      "che.hadoop.hbase.protobuf.generatedB\016Sna" +
+      "pshotProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
         public com.google.protobuf.ExtensionRegistry assignDescriptors(
             com.google.protobuf.Descriptors.FileDescriptor root) {
           descriptor = root;
-          internal_static_SnapshotFileInfo_descriptor =
+          internal_static_hbase_pb_SnapshotFileInfo_descriptor =
             getDescriptor().getMessageTypes().get(0);
-          internal_static_SnapshotFileInfo_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotFileInfo_descriptor,
+              internal_static_hbase_pb_SnapshotFileInfo_descriptor,
               new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", });
-          internal_static_SnapshotRegionManifest_descriptor =
+          internal_static_hbase_pb_SnapshotRegionManifest_descriptor =
             getDescriptor().getMessageTypes().get(1);
-          internal_static_SnapshotRegionManifest_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_descriptor,
               new java.lang.String[] { "Version", "RegionInfo", "FamilyFiles", });
-          internal_static_SnapshotRegionManifest_StoreFile_descriptor =
-            internal_static_SnapshotRegionManifest_descriptor.getNestedTypes().get(0);
-          internal_static_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor =
+            internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(0);
+          internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_StoreFile_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_StoreFile_descriptor,
               new java.lang.String[] { "Name", "Reference", "FileSize", });
-          internal_static_SnapshotRegionManifest_FamilyFiles_descriptor =
-            internal_static_SnapshotRegionManifest_descriptor.getNestedTypes().get(1);
-          internal_static_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor =
+            internal_static_hbase_pb_SnapshotRegionManifest_descriptor.getNestedTypes().get(1);
+          internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotRegionManifest_FamilyFiles_descriptor,
+              internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor,
               new java.lang.String[] { "FamilyName", "StoreFiles", });
-          internal_static_SnapshotDataManifest_descriptor =
+          internal_static_hbase_pb_SnapshotDataManifest_descriptor =
             getDescriptor().getMessageTypes().get(2);
-          internal_static_SnapshotDataManifest_fieldAccessorTable = new
+          internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_SnapshotDataManifest_descriptor,
+              internal_static_hbase_pb_SnapshotDataManifest_descriptor,
               new java.lang.String[] { "TableSchema", "RegionManifests", });
           return null;
         }
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SwitchProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SwitchProtos.java
new file mode 100644
index 0000000..cf984b3
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SwitchProtos.java
@@ -0,0 +1,485 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Switch.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class SwitchProtos {
+  private SwitchProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface SwitchStateOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional bool enabled = 1;
+    /**
+     * optional bool enabled = 1;
+     */
+    boolean hasEnabled();
+    /**
+     * optional bool enabled = 1;
+     */
+    boolean getEnabled();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.SwitchState}
+   */
+  public static final class SwitchState extends
+      com.google.protobuf.GeneratedMessage
+      implements SwitchStateOrBuilder {
+    // Use SwitchState.newBuilder() to construct.
+    private SwitchState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private SwitchState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final SwitchState defaultInstance;
+    public static SwitchState getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public SwitchState getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SwitchState(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              enabled_ = input.readBool();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.internal_static_hbase_pb_SwitchState_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.class, org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<SwitchState> PARSER =
+        new com.google.protobuf.AbstractParser<SwitchState>() {
+      public SwitchState parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new SwitchState(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<SwitchState> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional bool enabled = 1;
+    public static final int ENABLED_FIELD_NUMBER = 1;
+    private boolean enabled_;
+    /**
+     * optional bool enabled = 1;
+     */
+    public boolean hasEnabled() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * optional bool enabled = 1;
+     */
+    public boolean getEnabled() {
+      return enabled_;
+    }
+
+    private void initFields() {
+      enabled_ = false;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBool(1, enabled_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(1, enabled_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState other = (org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState) obj;
+
+      boolean result = true;
+      result = result && (hasEnabled() == other.hasEnabled());
+      if (hasEnabled()) {
+        result = result && (getEnabled()
+            == other.getEnabled());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasEnabled()) {
+        hash = (37 * hash) + ENABLED_FIELD_NUMBER;
+        hash = (53 * hash) + hashBoolean(getEnabled());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.SwitchState}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchStateOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.internal_static_hbase_pb_SwitchState_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.internal_static_hbase_pb_SwitchState_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.class, org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        enabled_ = false;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.internal_static_hbase_pb_SwitchState_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState build() {
+        org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState result = new org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.enabled_ = enabled_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState.getDefaultInstance()) return this;
+        if (other.hasEnabled()) {
+          setEnabled(other.getEnabled());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SwitchProtos.SwitchState) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional bool enabled = 1;
+      private boolean enabled_ ;
+      /**
+       * optional bool enabled = 1;
+       */
+      public boolean hasEnabled() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public boolean getEnabled() {
+        return enabled_;
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public Builder setEnabled(boolean value) {
+        bitField0_ |= 0x00000001;
+        enabled_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional bool enabled = 1;
+       */
+      public Builder clearEnabled() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        enabled_ = false;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SwitchState)
+    }
+
+    static {
+      defaultInstance = new SwitchState(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.SwitchState)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SwitchState_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_SwitchState_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\014Switch.proto\022\010hbase.pb\"\036\n\013SwitchState\022" +
+      "\017\n\007enabled\030\001 \001(\010B?\n*org.apache.hadoop.hb" +
+      "ase.protobuf.generatedB\014SwitchProtosH\001\240\001" +
+      "\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_hbase_pb_SwitchState_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_hbase_pb_SwitchState_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_hbase_pb_SwitchState_descriptor,
+              new java.lang.String[] { "Enabled", });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index aa31a5e..6f80c00 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -279,6 +279,29 @@ message IsBalancerEnabledResponse {
   required bool enabled = 1;
 }
 
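+// Mirrors Admin.SwitchType on the client side; MasterRpcServices#convert
+// maps between the two.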
+enum MasterSwitchType {
+  SPLIT = 0;
+  MERGE = 1;
+}
+
+message SetSwitchesRequest {
+  required bool enabled = 1;
+  optional bool synchronous = 2;
+  repeated MasterSwitchType switch_types = 3;
+}
+
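+// prev_value reports the previous state of each switch, in the same order
+// as the switch_types given in the request.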
+message SetSwitchesResponse {
+  repeated bool prev_value = 1;
+}
+
+message IsSwitchEnabledRequest {
+  required MasterSwitchType switch_type = 1;
+}
+
+message IsSwitchEnabledResponse {
+  required bool enabled = 1;
+}
+
 message NormalizeRequest {
 }
 
@@ -633,6 +656,19 @@ service MasterService {
     returns(IsBalancerEnabledResponse);
 
   /**
+   * Turn the switches on or off.
+   * If synchronous is true, it waits for any outstanding split or merge operation to finish before returning.
+   */
+  rpc SetSwitches(SetSwitchesRequest)
+    returns(SetSwitchesResponse);
+
+  /**
+   * Query whether the switch is on/off.
+   */
+  rpc IsSwitchEnabled(IsSwitchEnabledRequest)
+    returns(IsSwitchEnabledResponse);
+
+  /**
    * Run region normalizer. Can NOT run for various reasons. Check logs.
    */
   rpc Normalize(NormalizeRequest)
diff --git a/hbase-protocol/src/main/protobuf/Switch.proto b/hbase-protocol/src/main/protobuf/Switch.proto
new file mode 100644
index 0000000..ecbeab8
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/Switch.proto
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers used to represent the state of the region split and merge switches.
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "SwitchProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
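+// SwitchState is the payload the master keeps in the per-switch znode
+// (one znode each for split and merge); SwitchTrackerManager prefixes the
+// serialized bytes with the standard HBase PB magic.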
+message SwitchState {
+  optional bool enabled = 1;
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
index b455828..98043dc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
@@ -59,9 +59,11 @@ import org.apache.hadoop.hbase.RegionStateListener;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
 import org.apache.hadoop.hbase.executor.EventHandler;
 import org.apache.hadoop.hbase.executor.EventType;
 import org.apache.hadoop.hbase.executor.ExecutorService;
@@ -2310,6 +2312,10 @@ public class AssignmentManager {
       return hri.getShortNameToLog() + " is not opening on " + serverName;
     }
 
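+    // Refuse the split transition outright while the split switch is off.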
+    if (!((HMaster)server).getSwitchTracker().isSwitchEnabled(Admin.SwitchType.SPLIT)) {
+      return "split switch is off!";
+    }
+
     // Just return in case of retrying
     if (current.isSplitting()) {
       return null;
@@ -2468,6 +2474,9 @@ public class AssignmentManager {
       return "Merging daughter region already exists, p=" + current;
     }
 
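+    // Likewise refuse region merges while the merge switch is off.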
+    if (!((HMaster)server).getSwitchTracker().isSwitchEnabled(Admin.SwitchType.MERGE)) {
+      return "merge switch is off!";
+    }
     // Just return in case of retrying
     if (current != null) {
       return null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 43f8efa..3ed43f1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -76,12 +76,14 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.coprocessor.BypassCoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
 import org.apache.hadoop.hbase.executor.ExecutorType;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
@@ -154,6 +156,7 @@ import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.RegionNormalizerTracker;
 import org.apache.hadoop.hbase.zookeeper.RegionServerTracker;
+import org.apache.hadoop.hbase.zookeeper.SwitchTrackerManager;
 import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@@ -252,6 +255,9 @@ public class HMaster extends HRegionServer implements MasterServices {
   // Tracker for load balancer state
   LoadBalancerTracker loadBalancerTracker;
 
+  // Tracker for split and merge state
+  SwitchTrackerManager switchTracker;
+
   // Tracker for region normalizer state
   private RegionNormalizerTracker regionNormalizerTracker;
 
@@ -577,8 +583,13 @@ public class HMaster extends HRegionServer implements MasterServices {
     this.normalizer.setMasterServices(this);
     this.loadBalancerTracker = new LoadBalancerTracker(zooKeeper, this);
     this.loadBalancerTracker.start();
+
     this.regionNormalizerTracker = new RegionNormalizerTracker(zooKeeper, this);
     this.regionNormalizerTracker.start();
+
+    this.switchTracker = new SwitchTrackerManager(zooKeeper, conf, this);
+    this.switchTracker.start();
+
     this.assignmentManager = new AssignmentManager(this, serverManager,
       this.balancer, this.service, this.metricsMaster,
       this.tableLockManager, tableStateManager);
@@ -2778,6 +2789,20 @@ public class HMaster extends HRegionServer implements MasterServices {
     return null == regionNormalizerTracker? false: regionNormalizerTracker.isNormalizerOn();
   }
 
+  /**
+   * Queries the state of the {@link SwitchTrackerManager}. If the tracker is not
+   * initialized, or if switchType is not recognized, false is returned.
+   * @param switchType see {@link org.apache.hadoop.hbase.client.Admin.SwitchType}
+   * @return The state of the switch
+   */
+  public boolean isSwitchEnabled(Admin.SwitchType switchType) {
+    if (null == switchTracker) {
+      return false;
+    }
+    return switchTracker.isSwitchEnabled(switchType);
+  }
+
   /**
    * Fetch the configured {@link LoadBalancer} class name. If none is set, a default is returned.
    *
@@ -2794,4 +2819,8 @@ public class HMaster extends HRegionServer implements MasterServices {
   public RegionNormalizerTracker getRegionNormalizerTracker() {
     return regionNormalizerTracker;
   }
+
+  public SwitchTrackerManager getSwitchTracker() {
+    return switchTracker;
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index e08463f..e2c0012 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
@@ -1505,6 +1506,35 @@ public class MasterRpcServices extends RSRpcServices
   }
 
   @Override
+  public SetSwitchesResponse setSwitches(RpcController controller, SetSwitchesRequest request)
+    throws ServiceException {
+    SetSwitchesResponse.Builder response = SetSwitchesResponse.newBuilder();
+    try {
+      master.checkInitialized();
+      for (MasterSwitchType masterSwitchType : request.getSwitchTypesList()) {
+        Admin.SwitchType switchType = convert(masterSwitchType);
+        boolean oldValue = master.isSwitchEnabled(switchType);
+        boolean newValue = request.getEnabled();
+        master.getSwitchTracker().setSwitchEnabled(newValue, switchType);
+        response.addPrevValue(oldValue);
+      }
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    } catch (KeeperException e) {
+      throw new ServiceException(e);
+    }
+    return response.build();
+  }
+
+  @Override
+  public IsSwitchEnabledResponse isSwitchEnabled(RpcController controller,
+    IsSwitchEnabledRequest request) throws ServiceException {
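+    // No checkInitialized() needed here: HMaster#isSwitchEnabled returns
+    // false until the switch tracker has been set up.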
+    IsSwitchEnabledResponse.Builder response = IsSwitchEnabledResponse.newBuilder();
+    response.setEnabled(master.isSwitchEnabled(convert(request.getSwitchType())));
+    return response.build();
+  }
+
+  @Override
   public NormalizeResponse normalize(RpcController controller,
       NormalizeRequest request) throws ServiceException {
     try {
@@ -1573,4 +1603,16 @@ public class MasterRpcServices extends RSRpcServices
     }
     return response.build();
   }
+
+  private Admin.SwitchType convert(MasterProtos.MasterSwitchType switchType) {
+    switch (switchType) {
+      case SPLIT:
+        return Admin.SwitchType.SPLIT;
+      case MERGE:
+        return Admin.SwitchType.MERGE;
+      default:
+        break;
+    }
+    return null;
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index bd5ca9d..5363f3d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -235,6 +235,7 @@ public class HBaseFsck extends Configured implements Closeable {
   private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older
   private static boolean forceExclusive = false; // only this hbck can modify HBase
   private static boolean disableBalancer = false; // disable load balancer to keep regions stable
+  private static boolean disableSplitAndMerge = false; // disable split and merge
   private boolean fixAssignments = false; // fix assignment errors?
   private boolean fixMeta = false; // fix meta errors?
   private boolean checkHdfs = true; // load and check fs consistency?
@@ -683,6 +684,11 @@ public class HBaseFsck extends Configured implements Closeable {
     if (shouldDisableBalancer()) {
       oldBalancer = admin.setBalancerRunning(false, true);
     }
+    boolean[] oldSplitAndMerge = null;
+    if (shouldDisableSplitAndMerge()) {
+      oldSplitAndMerge = admin.setSwitches(false, false,
+        Admin.SwitchType.SPLIT, Admin.SwitchType.MERGE);
+    }
 
     try {
       onlineConsistencyRepair();
@@ -694,6 +700,17 @@ public class HBaseFsck extends Configured implements Closeable {
       if (shouldDisableBalancer() && oldBalancer) {
         admin.setBalancerRunning(oldBalancer, false);
       }
+
+      if (shouldDisableSplitAndMerge()) {
+        if (oldSplitAndMerge != null) {
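+          // setSwitches returned the previous values in the order the types
+          // were passed above: [0] is SPLIT, [1] is MERGE.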
+          if (oldSplitAndMerge[0]) {
+            admin.setSwitches(true, false, Admin.SwitchType.SPLIT);
+          }
+          if (oldSplitAndMerge[1]) {
+            admin.setSwitches(true, false, Admin.SwitchType.MERGE);
+          }
+        }
+      }
     }
 
     if (checkRegionBoundaries) {
@@ -4184,6 +4201,13 @@ public class HBaseFsck extends Configured implements Closeable {
   }
 
   /**
+   * Disable the split and merge switches (the -disableSplitAndMerge command line option).
+   */
+  public static void setDisableSplitAndMerge() {
+    disableSplitAndMerge = true;
+  }
+
+  /**
    * The balancer should be disabled if we are modifying HBase.
    * It can be disabled if you want to prevent region movement from causing
    * false positives.
@@ -4193,6 +4217,15 @@ public class HBaseFsck extends Configured implements Closeable {
   }
 
   /**
+   * Splits and merges should be disabled while we are modifying HBase.
+   * Disabling them prevents in-flight region splits or merges from causing
+   * false positives.
+   */
+  public boolean shouldDisableSplitAndMerge() {
+    return fixAny || disableSplitAndMerge;
+  }
+
+  /**
    * Set summary mode.
    * Print only summary of the tables and status (OK or INCONSISTENT)
    */
@@ -4551,6 +4584,8 @@ public class HBaseFsck extends Configured implements Closeable {
         setForceExclusive();
       } else if (cmd.equals("-disableBalancer")) {
         setDisableBalancer();
+      }  else if (cmd.equals("-disableSplitAndMerge")) {
+        setDisableSplitAndMerge();
       } else if (cmd.equals("-timelag")) {
         if (i == args.length - 1) {
           errors.reportError(ERROR_CODE.WRONG_USAGE, "HBaseFsck: -timelag needs a value.");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SwitchTrackerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SwitchTrackerManager.java
new file mode 100644
index 0000000..fcbd9ef
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/SwitchTrackerManager.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.zookeeper;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.SwitchProtos;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.zookeeper.KeeperException;
+
+import java.io.IOException;
+
+/**
+ * Tracks the switch states in ZK, for example the split and merge switches.
+ *
+ * TODO: we should move the balancer and normalizer state into this class
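+ *
+ * <p>A minimal usage sketch (error handling omitted; assumes a started
+ * ZooKeeperWatcher and an Abortable, as in HMaster):
+ * <pre>
+ *   SwitchTrackerManager tracker = new SwitchTrackerManager(watcher, conf, abortable);
+ *   tracker.start();
+ *   tracker.setSwitchEnabled(false, Admin.SwitchType.SPLIT); // turn splits off
+ *   boolean splitsOn = tracker.isSwitchEnabled(Admin.SwitchType.SPLIT);
+ * </pre>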
+ */
+@InterfaceAudience.Private
+public class SwitchTrackerManager {
+
+  private String splitZnode;
+  private String mergeZnode;
+
+  private SwitchStateTracker splitStateTracker;
+  private SwitchStateTracker mergeStateTracker;
+
+  public SwitchTrackerManager(ZooKeeperWatcher watcher, Configuration conf, Abortable abortable) {
+    try {
+      if (ZKUtil.checkExists(watcher, watcher.getSwitchZNode()) < 0) {
+        ZKUtil.createAndFailSilent(watcher, watcher.getSwitchZNode());
+      }
+    } catch (KeeperException e) {
+      throw new RuntimeException(e);
+    }
+    splitZnode = ZKUtil.joinZNode(watcher.getSwitchZNode(),
+      conf.get("zookeeper.znode.switch.split", "split"));
+    mergeZnode = ZKUtil.joinZNode(watcher.getSwitchZNode(),
+      conf.get("zookeeper.znode.switch.merge", "merge"));
+    splitStateTracker = new SwitchStateTracker(watcher, splitZnode, abortable);
+    mergeStateTracker = new SwitchStateTracker(watcher, mergeZnode, abortable);
+  }
+
+  public void start() {
+    splitStateTracker.start();
+    mergeStateTracker.start();
+  }
+
+  public boolean isSwitchEnabled(Admin.SwitchType switchType) {
+    switch (switchType) {
+      case SPLIT:
+        return splitStateTracker.isSwitchEnabled();
+      case MERGE:
+        return mergeStateTracker.isSwitchEnabled();
+      default:
+        break;
+    }
+    return false;
+  }
+
+  public void setSwitchEnabled(boolean enabled, Admin.SwitchType switchType)
+    throws KeeperException {
+    switch (switchType) {
+      case SPLIT:
+        splitStateTracker.setSwitchEnabled(enabled);
+        break;
+      case MERGE:
+        mergeStateTracker.setSwitchEnabled(enabled);
+        break;
+      default:
+        break;
+    }
+  }
+
+  class SwitchStateTracker extends ZooKeeperNodeTracker {
+
+    public SwitchStateTracker(ZooKeeperWatcher watcher, String node, Abortable abortable) {
+      super(watcher, node, abortable);
+    }
+
+    /**
+     * Return true if the switch is on, false otherwise
+     */
+    public boolean isSwitchEnabled() {
+      byte [] upData = super.getData(false);
+      try {
+        // if data in ZK is null, use default of on.
+        return upData == null || parseFrom(upData).getEnabled();
+      } catch (DeserializationException dex) {
+        LOG.error("ZK state for the switch could not be parsed " + Bytes.toStringBinary(upData));
+        // return false to be safe.
+        return false;
+      }
+    }
+
+    /**
+     * Set the switch on or off.
+     * @param enabled true to enable the switch, false to disable it
+     * @throws KeeperException if the ZooKeeper update fails
+     */
+    public void setSwitchEnabled(boolean enabled) throws KeeperException {
+      byte [] upData = toByteArray(enabled);
+      try {
+        ZKUtil.setData(watcher, node, upData);
+      } catch(KeeperException.NoNodeException nne) {
+        ZKUtil.createAndWatch(watcher, node, upData);
+      }
+      super.nodeDataChanged(node);
+    }
+
+    private byte [] toByteArray(boolean enabled) {
+      SwitchProtos.SwitchState.Builder builder = SwitchProtos.SwitchState.newBuilder();
+      builder.setEnabled(enabled);
+      return ProtobufUtil.prependPBMagic(builder.build().toByteArray());
+    }
+
+    private SwitchProtos.SwitchState parseFrom(byte [] bytes)
+      throws DeserializationException {
+      ProtobufUtil.expectPBMagicPrefix(bytes);
+      SwitchProtos.SwitchState.Builder builder = SwitchProtos.SwitchState.newBuilder();
+      try {
+        int magicLen = ProtobufUtil.lengthOfPBMagic();
+        ProtobufUtil.mergeFrom(builder, bytes, magicLen, bytes.length - magicLen);
+      } catch (IOException e) {
+        throw new DeserializationException(e);
+      }
+      return builder.build();
+    }
+  }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSwitchStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSwitchStatus.java
new file mode 100644
index 0000000..a2eab08
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSwitchStatus.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.JVMClusterUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+@Category({MediumTests.class, ClientTests.class})
+public class TestSwitchStatus {
+
+  private static final Log LOG = LogFactory.getLog(TestSwitchStatus.class);
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static byte [] FAMILY = Bytes.toBytes("testFamily");
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(2);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
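+  // Turn the split switch off and verify a split request is silently skipped,
+  // then turn it back on and verify the split goes through.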
+  @Test
+  public void testSplitSwitch() throws Exception {
+    TableName name = TableName.valueOf("testSplitSwitch");
+    Table t = TEST_UTIL.createTable(name, FAMILY);
+    TEST_UTIL.loadTable(t, FAMILY, false);
+
+    RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(t.getName());
+    int originalCount = locator.getAllRegionLocations().size();
+
+    Admin admin = TEST_UTIL.getAdmin();
+    initSwitchStatus(admin);
+    admin.setSwitches(false, false, Admin.SwitchType.SPLIT);
+    admin.split(t.getName());
+    int count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount == count);
+
+    admin.setSwitches(true, false, Admin.SwitchType.SPLIT);
+    admin.split(t.getName());
+    count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount < count);
+    admin.close();
+  }
+
+  @Test
+  public void testMergeSwitch() throws Exception {
+    TableName name = TableName.valueOf("testMergeSwitch");
+    Table t = TEST_UTIL.createTable(name, FAMILY);
+    TEST_UTIL.loadTable(t, FAMILY, false);
+
+    Admin admin = TEST_UTIL.getAdmin();
+    initSwitchStatus(admin);
+    // Split first so there are at least two regions to merge.
+    admin.split(t.getName());
+    int originalCount = waitOnSplitOrMerge(t).size();
+
+    // With the merge switch off, the merge request should be ignored.
+    admin.setSwitches(false, false, Admin.SwitchType.MERGE);
+    List<HRegionInfo> regions = admin.getTableRegions(t.getName());
+    assertTrue(regions.size() > 1);
+    admin.mergeRegions(regions.get(0).getEncodedNameAsBytes(),
+      regions.get(1).getEncodedNameAsBytes(), true);
+    int count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount == count);
+
+    waitForMergable(admin, name);
+    admin.setSwitches(true, false, Admin.SwitchType.MERGE);
+    admin.mergeRegions(regions.get(0).getEncodedNameAsBytes(),
+      regions.get(1).getEncodedNameAsBytes(), true);
+    count = waitOnSplitOrMerge(t).size();
+    assertTrue(originalCount > count);
+    admin.close();
+  }
+
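+  // Flip both switches in one call; the returned array holds each switch's previous
+  // value, in argument order.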
+  @Test
+  public void testMultiSwitches() throws IOException {
+    Admin admin = TEST_UTIL.getAdmin();
+    boolean[] switches = admin.setSwitches(false, false,
+      Admin.SwitchType.SPLIT, Admin.SwitchType.MERGE);
+    for (boolean s : switches) {
+      assertTrue(s);
+    }
+    assertFalse(admin.isSwitchEnabled(Admin.SwitchType.SPLIT));
+    assertFalse(admin.isSwitchEnabled(Admin.SwitchType.MERGE));
+    admin.close();
+  }
+
+  private void initSwitchStatus(Admin admin) throws IOException {
+    if (!admin.isSwitchEnabled(Admin.SwitchType.SPLIT)) {
+      admin.setSwitches(true, false, Admin.SwitchType.SPLIT);
+    }
+    if (!admin.isSwitchEnabled(Admin.SwitchType.MERGE)) {
+      admin.setSwitches(true, false, Admin.SwitchType.MERGE);
+    }
+    assertTrue(admin.isSwitchEnabled(Admin.SwitchType.SPLIT));
+    assertTrue(admin.isSwitchEnabled(Admin.SwitchType.MERGE));
+  }
+
+  private void waitForMergable(Admin admin, TableName t) throws InterruptedException, IOException {
+    // Wait for the Regions to be mergeable
+    MiniHBaseCluster miniCluster = TEST_UTIL.getMiniHBaseCluster();
+    int mergeable = 0;
+    while (mergeable < 2) {
+      Thread.sleep(100);
+      admin.majorCompact(t);
+      mergeable = 0;
+      for (JVMClusterUtil.RegionServerThread regionThread: miniCluster.getRegionServerThreads()) {
+        for (Region region: regionThread.getRegionServer().getOnlineRegions(t)) {
+          mergeable += ((HRegion)region).isMergeable() ? 1 : 0;
+        }
+      }
+    }
+  }
+
+  /*
+   * Wait on a table split or merge. May return because we waited long enough and the
+   * region count never changed; the caller should check.
+   * @param t the table to watch
+   * @return List of region locations; the caller needs to verify the split or merge
+   * actually happened.
+   */
+  private List<HRegionLocation> waitOnSplitOrMerge(final Table t)
+    throws IOException {
+    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(t.getName())) {
+      List<HRegionLocation> regions = locator.getAllRegionLocations();
+      int originalCount = regions.size();
+      for (int i = 0; i < TEST_UTIL.getConfiguration().getInt("hbase.test.retries", 10); i++) {
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException e) {
+          LOG.warn("Interrupted while waiting on split or merge", e);
+        }
+        regions = locator.getAllRegionLocations();
+        if (regions.size() != originalCount) {
+          break;
+        }
+      }
+      return regions;
+    }
+  }
+
+}
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 82f0700..e8b2bce 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -132,6 +132,38 @@ module Hbase
       end
     end
 
+    #----------------------------------------------------------------------------------------------
+    # Enable/disable one switch
+    # Returns previous switch setting.
+    def set_switch(type, enableDisable)
+      switch_type = nil
+      if type == "SPLIT"
+        switch_type = org.apache.hadoop.hbase.client.Admin::SwitchType::SPLIT
+      elsif type == "MERGE"
+        switch_type = org.apache.hadoop.hbase.client.Admin::SwitchType::MERGE
+      else
+        raise ArgumentError, "only SPLIT or MERGE accepted for type!"
+      end
+      @admin.setSwitches(
+        java.lang.Boolean::valueOf(enableDisable), java.lang.Boolean::valueOf(false),
+        switch_type)[0]
+    end
+
+    #----------------------------------------------------------------------------------------------
+    # Query the current state of the switch.
+    # Returns the switch's state (true is enabled).
+    def get_switch(type)
+      switch_type = nil
+      if type == "SPLIT"
+        switch_type = org.apache.hadoop.hbase.client.Admin::SwitchType::SPLIT
+      elsif type == "MERGE"
+        switch_type = org.apache.hadoop.hbase.client.Admin::SwitchType::MERGE
+      else
+        raise ArgumentError, "only SPLIT or MERGE accepted for type!"
+      end
+      @admin.isSwitchEnabled(switch_type)
+    end
+
     def locate_region(table_name, row_key)
       locator = @connection.getRegionLocator(TableName.valueOf(table_name))
       begin
diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb
index 0ecd3d7..6e3af5e 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -333,6 +333,8 @@ Shell.load_command_group(
     catalogjanitor_enabled
     compact_rs
     trace
+    get_switch
+    set_switch
   ],
   # TODO remove older hlog_roll command
   :aliases => {
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_switch.rb b/hbase-shell/src/main/ruby/shell/commands/get_switch.rb
new file mode 100644
index 0000000..d63180e
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/get_switch.rb
@@ -0,0 +1,41 @@
+#!/usr/bin/env hbase-jruby
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with this
+# work for additional information regarding copyright ownership. The ASF
+# licenses this file to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Prints the current state of a switch
+
+module Shell
+  module Commands
+    class GetSwitch < Command
+      def help
+        return <<-EOF
+Query the state of a switch. The switch type can be 'SPLIT' or 'MERGE'.
+Examples:
+
+  hbase> get_switch 'SPLIT'
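+  hbase> get_switch 'MERGE'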
+EOF
+      end
+
+      def command(switch_type)
+        format_simple_command do
+          formatter.row([
+            admin.get_switch(switch_type) ? "true" : "false"
+          ])
+        end
+      end
+    end
+  end
+end
diff --git a/hbase-shell/src/main/ruby/shell/commands/set_switch.rb b/hbase-shell/src/main/ruby/shell/commands/set_switch.rb
new file mode 100644
index 0000000..077a6a3
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/set_switch.rb
@@ -0,0 +1,42 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+  module Commands
+    class SetSwitch < Command
+      def help
+        return <<-EOF
+Enable/Disable one switch. The switch type can be 'SPLIT' or 'MERGE'. Returns the previous switch state.
+Examples:
+
+  hbase> set_switch 'SPLIT', true
+  hbase> set_switch 'SPLIT', false
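+  hbase> set_switch 'MERGE', true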
+EOF
+      end
+
+      def command(switch_type, enableDisable)
+        format_simple_command do
+          formatter.row([
+            admin.set_switch(switch_type, enableDisable) ? "true" : "false"
+          ])
+        end
+      end
+    end
+  end
+end
-- 
1.9.3 (Apple Git-50)