From 0fb0491a0204c85453aa49bc539555598ea03571 Mon Sep 17 00:00:00 2001
From: Ajay Jadhav
Date: Fri, 27 Jan 2017 12:43:25 -0800
Subject: [PATCH] Add mechanism to control hbase cleaner behavior

---
 .../java/org/apache/hadoop/hbase/client/Admin.java |   24 +
 .../hadoop/hbase/client/CleanerChoreState.java     |   62 +
 .../hbase/client/ConnectionImplementation.java     |   25 +-
 .../org/apache/hadoop/hbase/client/HBaseAdmin.java |   40 +-
 .../hbase/shaded/protobuf/RequestConverter.java    |   39 +
 .../hbase/shaded/protobuf/ResponseConverter.java   |    9 +
 .../shaded/protobuf/generated/MasterProtos.java    | 5036 ++++++++++++++++----
 .../src/main/protobuf/Master.proto                 |   45 +
 .../org/apache/hadoop/hbase/master/HMaster.java    |   33 +-
 .../hadoop/hbase/master/MasterRpcServices.java     |   39 +
 .../hadoop/hbase/master/cleaner/CleanerChore.java  |   23 +
 .../hbase/master/cleaner/TestCleanerChore.java     |   66 +
 hbase-shell/src/main/ruby/hbase/admin.rb           |   20 +
 hbase-shell/src/main/ruby/shell.rb                 |    3 +
 .../main/ruby/shell/commands/cleaner_chore_run.rb  |   35 +
 .../ruby/shell/commands/cleaner_chore_state.rb     |   36 +
 .../ruby/shell/commands/cleaner_chore_switch.rb    |   37 +
 17 files changed, 4588 insertions(+), 984 deletions(-)
 create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/client/CleanerChoreState.java
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/cleaner_chore_run.rb
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/cleaner_chore_state.rb
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/cleaner_chore_switch.rb

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 583ee52..d2ade18 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -897,6 +897,30 @@ public interface Admin extends Abortable, Closeable {
   boolean isCatalogJanitorEnabled() throws IOException;
 
   /**
+   * Enable/Disable the cleaner chore.
+   *
+   * @param on if true, enables the cleaner chore
+   * @return the previous state
+   * @throws IOException if a remote or network exception occurs
+   */
+  boolean setCleanerChoreRunning(final boolean on) throws IOException;
+
+  /**
+   * Ask for the cleaner chore to run.
+   *
+   * @return true if the cleaner chore ran, false otherwise
+   * @throws IOException if a remote or network exception occurs
+   */
+  boolean runCleanerChore() throws IOException;
+
+  /**
+   * Query the current state of the cleaner chore.
+   * @return the cleaner chore state
+   * @throws IOException if a remote or network exception occurs
+   */
+  CleanerChoreState getCleanerChoreState() throws IOException;
+
+  /**
    * Merge two regions. Asynchronous operation.
    *
    * @param nameOfRegionA encoded or full name of region a
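
For illustration, a minimal client-side sketch of the new Admin API (not part of the patch; it assumes an already-created Connection named "conn"):

    // Sketch only: pause the cleaner chore around sensitive work, then restore it.
    try (Admin admin = conn.getAdmin()) {
      boolean prev = admin.setCleanerChoreRunning(false); // returns the previous state
      try {
        // ... work that should not race with HFile/log cleaning ...
      } finally {
        admin.setCleanerChoreRunning(prev);               // restore the previous state
      }
      boolean ran = admin.runCleanerChore();              // ask for an immediate run
      CleanerChoreState state = admin.getCleanerChoreState();
    }
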
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CleanerChoreState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CleanerChoreState.java
new file mode 100644
index 0000000..64e44cb
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CleanerChoreState.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+/**
+ * Enum representing the cleaner chore state
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public enum CleanerChoreState {
+  HFILE_CLEANER_ENABLED(0),
+  LOG_CLEANER_ENABLED(1),
+  BOTH_HFILE_LOG_CLEANER_ENABLED(2),
+  BOTH_HFILE_LOG_CLEANER_DISABLED(3);
+
+  private final int value;
+
+  private CleanerChoreState(int value) {
+    this.value = value;
+  }
+
+  public int getValue() {
+    return value;
+  }
+
+  public String getName() {
+    return toString();
+  }
+
+  public static CleanerChoreState valueOf(int value) {
+    switch (value) {
+    case 0:
+      return HFILE_CLEANER_ENABLED;
+    case 1:
+      return LOG_CLEANER_ENABLED;
+    case 2:
+      return BOTH_HFILE_LOG_CLEANER_ENABLED;
+    case 3:
+      return BOTH_HFILE_LOG_CLEANER_DISABLED;
+    default:
+      throw new IllegalArgumentException("Unknown CleanerChoreState value " + value);
+    }
+  }
+}
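
The integer passed to each constant mirrors the number of the corresponding protobuf enum value, so callers can translate a wire value back with the valueOf(int) overload. A small sketch (assuming a GetCleanerChoreStateResponse named "response", as in HBaseAdmin below):

    // Sketch only: decode the state number carried in the RPC response.
    CleanerChoreState state = CleanerChoreState.valueOf(
        response.getCleanerChoreState().getNumber());
    boolean hfileCleaningOn = state == CleanerChoreState.HFILE_CLEANER_ENABLED
        || state == CleanerChoreState.BOTH_HFILE_LOG_CLEANER_ENABLED;
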
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index ca21365..9084203 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -80,12 +80,12 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientServ
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest;
@@ -1444,6 +1444,27 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
     }
 
     @Override
+    public MasterProtos.RunCleanerChoreResponse runCleanerChore(RpcController controller,
+        MasterProtos.RunCleanerChoreRequest request)
+        throws ServiceException {
+      return stub.runCleanerChore(controller, request);
+    }
+
+    @Override
+    public MasterProtos.SetCleanerChoreRunningResponse setCleanerChoreRunning(
+        RpcController controller, MasterProtos.SetCleanerChoreRunningRequest request)
+        throws ServiceException {
+      return stub.setCleanerChoreRunning(controller, request);
+    }
+
+    @Override
+    public MasterProtos.GetCleanerChoreStateResponse getCleanerChoreState(
+        RpcController controller, MasterProtos.GetCleanerChoreStateRequest request)
+        throws ServiceException {
+      return stub.getCleanerChoreState(controller, request);
+    }
+
+    @Override
     public ClientProtos.CoprocessorServiceResponse execMasterService(
         RpcController controller, ClientProtos.CoprocessorServiceRequest request)
         throws ServiceException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 6e1f254..a3930f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.RegionLoad;
+import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
@@ -130,6 +130,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTabl
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
@@ -1459,6 +1460,43 @@ public class HBaseAdmin implements Admin {
     });
   }
 
+  @Override
+  public boolean setCleanerChoreRunning(final boolean on) throws IOException {
+    return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
+      @Override public Boolean rpcCall() throws Exception {
+        return master.setCleanerChoreRunning(getRpcController(),
+            RequestConverter.buildSetCleanerChoreRunningRequest(on))
+            .getPrevValue();
+      }
+    });
+  }
+
+  @Override
+  public boolean runCleanerChore() throws IOException {
+    return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
+      @Override public Boolean rpcCall() throws Exception {
+        return master.runCleanerChore(getRpcController(),
+            RequestConverter.buildRunCleanerChoreRequest())
+            .getCleanerChoreRan();
+      }
+    });
+  }
+
+  @Override
+  public CleanerChoreState getCleanerChoreState() throws IOException {
+    return executeCallable(
+        new MasterCallable<CleanerChoreState>(getConnection(), getRpcControllerFactory()) {
+          @Override public CleanerChoreState rpcCall() throws Exception {
+            GetCleanerChoreStateResponse response = master
+                .getCleanerChoreState(
+                    getRpcController(),
+                    RequestConverter
+                        .buildGetCleanerChoreStateRequest());
+            return CleanerChoreState.valueOf(response.getCleanerChoreState().getNumber());
+          }
+        });
+  }
+
   private boolean isEncodedRegionName(byte[] regionName) throws IOException {
     try {
       HRegionInfo.parseRegionName(regionName);
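
The new shell commands listed in the diffstat (cleaner_chore_run.rb, cleaner_chore_state.rb, cleaner_chore_switch.rb; their diffs fall outside this excerpt) expose the same three RPCs. Expected usage would look roughly like this, though the exact output depends on the command implementations not shown here:

    hbase(main):001:0> cleaner_chore_switch false   # disable the chore; prints the previous state
    hbase(main):002:0> cleaner_chore_run            # trigger a run; true if the chore ran
    hbase(main):003:0> cleaner_chore_state          # report the current cleaner chore state
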
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 8de9ad8..21ddaa9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -85,7 +85,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColu
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
@@ -103,6 +105,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegion
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;
@@ -1452,6 +1455,42 @@ public final class RequestConverter {
   }
 
+  /**
+   * @see #buildRunCleanerChoreRequest()
+   */
+  private static final RunCleanerChoreRequest CLEANER_CHORE_REQUEST =
+      RunCleanerChoreRequest.newBuilder().build();
+
+  /**
+   * Creates a request for running the cleaner chore
+   * @return A {@link RunCleanerChoreRequest}
+   */
+  public static RunCleanerChoreRequest buildRunCleanerChoreRequest() {
+    return CLEANER_CHORE_REQUEST;
+  }
+
+  /**
+   * Creates a request for enabling/disabling the cleaner chore
+   * @return A {@link SetCleanerChoreRunningRequest}
+   */
+  public static SetCleanerChoreRunningRequest buildSetCleanerChoreRunningRequest(boolean on) {
+    return SetCleanerChoreRunningRequest.newBuilder().setOn(on).build();
+  }
+
+  /**
+   * @see #buildGetCleanerChoreStateRequest()
+   */
+  private static final GetCleanerChoreStateRequest GET_CLEANER_CHORE_STATE_REQUEST =
+      GetCleanerChoreStateRequest.newBuilder().build();
+
+  /**
+   * Creates a request for querying the master about the state of the cleaner chore
+   * @return A {@link GetCleanerChoreStateRequest}
+   */
+  public static GetCleanerChoreStateRequest buildGetCleanerChoreStateRequest() {
+    return GET_CLEANER_CHORE_STATE_REQUEST;
+  }
+
   /**
    * Creates a request for querying the master the last flushed sequence Id for a region
    * @param regionName
    * @return A {@link GetLastFlushedSequenceIdRequest}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
index a83667d..cbcad80 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pa
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;
 import org.apache.hadoop.hbase.regionserver.RegionOpeningState;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
@@ -314,6 +315,14 @@ public final class ResponseConverter {
     return EnableCatalogJanitorResponse.newBuilder().setPrevValue(prevValue).build();
   }
 
+  /**
+   * Creates a response for the cleaner chore request
+   * @return A {@link RunCleanerChoreResponse} with {@code cleanerChoreRan} set to {@code ran}
+   */
+  public static RunCleanerChoreResponse buildRunCleanerChoreResponse(boolean ran) {
+    return RunCleanerChoreResponse.newBuilder().setCleanerChoreRan(ran).build();
+  }
+
   // End utilities for Admin
 
   /**
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
index 6a737b8..f551e33 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
@@ -39057,35 +39057,22 @@ public final class MasterProtos {
   }
 
-  public interface SnapshotRequestOrBuilder extends
-      // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRequest)
+  public interface RunCleanerChoreRequestOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.RunCleanerChoreRequest)
       org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
-     */
-    boolean hasSnapshot();
-    /**
-     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
-     */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
-    /**
-     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
-     */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
-   * Protobuf type {@code hbase.pb.SnapshotRequest}
+   * Protobuf type {@code hbase.pb.RunCleanerChoreRequest}
    */
-  public static final class SnapshotRequest extends
+  public static final class RunCleanerChoreRequest extends
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
-      //
@@protoc_insertion_point(message_implements:hbase.pb.SnapshotRequest) - SnapshotRequestOrBuilder { - // Use SnapshotRequest.newBuilder() to construct. - private SnapshotRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + // @@protoc_insertion_point(message_implements:hbase.pb.RunCleanerChoreRequest) + RunCleanerChoreRequestOrBuilder { + // Use RunCleanerChoreRequest.newBuilder() to construct. + private RunCleanerChoreRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private SnapshotRequest() { + private RunCleanerChoreRequest() { } @java.lang.Override @@ -39093,12 +39080,11 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SnapshotRequest( + private RunCleanerChoreRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); - int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -39116,19 +39102,6 @@ public final class MasterProtos { } break; } - case 10: { - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = snapshot_.toBuilder(); - } - snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(snapshot_); - snapshot_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { @@ -39143,36 +39116,14 @@ public final class MasterProtos { } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); - } - - private int bitField0_; - public static final int SNAPSHOT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public boolean hasSnapshot() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription 
getSnapshot() { - return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.Builder.class); } private byte memoizedIsInitialized = -1; @@ -39181,23 +39132,12 @@ public final class MasterProtos { if (isInitialized == 1) return true; if (isInitialized == 0) return false; - if (!hasSnapshot()) { - memoizedIsInitialized = 0; - return false; - } - if (!getSnapshot().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, getSnapshot()); - } unknownFields.writeTo(output); } @@ -39206,10 +39146,6 @@ public final class MasterProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getSnapshot()); - } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -39221,17 +39157,12 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) obj; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest) obj; boolean result = true; - result = result && (hasSnapshot() == other.hasSnapshot()); - if (hasSnapshot()) { - result = result && getSnapshot() - .equals(other.getSnapshot()); - } result = result && unknownFields.equals(other.unknownFields); return result; } @@ -39243,67 +39174,63 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSnapshot()) { - hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; - hash = (53 * hash) + getSnapshot().hashCode(); - } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -39315,7 +39242,7 @@ public final class MasterProtos { public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { @@ -39330,25 +39257,25 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SnapshotRequest} + * Protobuf type {@code hbase.pb.RunCleanerChoreRequest} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRequest) - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder { + // @@protoc_insertion_point(builder_implements:hbase.pb.RunCleanerChoreRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -39361,50 +39288,32 @@ public final class MasterProtos { private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { - getSnapshotFieldBuilder(); } } public Builder clear() { super.clear(); - if (snapshotBuilder_ == null) { - snapshot_ = null; - } else { - snapshotBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); return this; } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreRequest_descriptor; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest build() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest result = buildPartial(); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest buildPartial() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (snapshotBuilder_ == null) { - result.snapshot_ = snapshot_; - } else { - result.snapshot_ = snapshotBuilder_.build(); - } - result.bitField0_ = to_bitField0_; + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest(this); onBuilt(); return result; } @@ -39436,31 +39345,22 @@ public final class MasterProtos { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) { - return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)other); + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest other) { - if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance()) return this; - if (other.hasSnapshot()) { - mergeSnapshot(other.getSnapshot()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.getDefaultInstance()) return 
this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { - if (!hasSnapshot()) { - return false; - } - if (!getSnapshot().isInitialized()) { - return false; - } return true; } @@ -39468,11 +39368,11 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parsedMessage = null; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -39481,125 +39381,6 @@ public final class MasterProtos { } return this; } - private int bitField0_; - - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public boolean hasSnapshot() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { - if (snapshotBuilder_ == null) { - return snapshot_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; - } else { - return snapshotBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { - if (snapshotBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - snapshot_ = value; - onChanged(); - } else { - snapshotBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public Builder setSnapshot( - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { - if (snapshotBuilder_ == null) { - snapshot_ = builderForValue.build(); - onChanged(); - } else { - snapshotBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { - if (snapshotBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - snapshot_ != null && - snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { - snapshot_ = - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); - } else { - snapshot_ = value; - } - onChanged(); - } else { - snapshotBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public Builder clearSnapshot() { - if (snapshotBuilder_ == null) { - snapshot_ = null; - onChanged(); - } else { - snapshotBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getSnapshotFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { - if (snapshotBuilder_ != null) { - return snapshotBuilder_.getMessageOrBuilder(); - } else { - return snapshot_ == null ? 
- org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; - } - } - /** - * required .hbase.pb.SnapshotDescription snapshot = 1; - */ - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> - getSnapshotFieldBuilder() { - if (snapshotBuilder_ == null) { - snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( - getSnapshot(), - getParentForChildren(), - isClean()); - snapshot_ = null; - } - return snapshotBuilder_; - } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); @@ -39611,70 +39392,70 @@ public final class MasterProtos { } - // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRequest) + // @@protoc_insertion_point(builder_scope:hbase.pb.RunCleanerChoreRequest) } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRequest) - private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:hbase.pb.RunCleanerChoreRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest(); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstance() { + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest getDefaultInstance() { return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public SnapshotRequest parsePartialFrom( + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public RunCleanerChoreRequest parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotRequest(input, extensionRegistry); + return new RunCleanerChoreRequest(input, extensionRegistry); } }; - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface SnapshotResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotResponse) + public interface RunCleanerChoreResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RunCleanerChoreResponse) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - boolean hasExpectedTimeout(); + boolean hasCleanerChoreRan(); /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - long getExpectedTimeout(); + boolean getCleanerChoreRan(); } /** - * Protobuf type {@code hbase.pb.SnapshotResponse} + * Protobuf type {@code hbase.pb.RunCleanerChoreResponse} */ - public static final class SnapshotResponse extends + public static final class RunCleanerChoreResponse extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotResponse) - SnapshotResponseOrBuilder { - // Use SnapshotResponse.newBuilder() to construct. - private SnapshotResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + // @@protoc_insertion_point(message_implements:hbase.pb.RunCleanerChoreResponse) + RunCleanerChoreResponseOrBuilder { + // Use RunCleanerChoreResponse.newBuilder() to construct. 
+ private RunCleanerChoreResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private SnapshotResponse() { - expectedTimeout_ = 0L; + private RunCleanerChoreResponse() { + cleanerChoreRan_ = false; } @java.lang.Override @@ -39682,7 +39463,7 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private SnapshotResponse( + private RunCleanerChoreResponse( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { @@ -39707,7 +39488,7 @@ public final class MasterProtos { } case 8: { bitField0_ |= 0x00000001; - expectedTimeout_ = input.readInt64(); + cleanerChoreRan_ = input.readBool(); break; } } @@ -39724,30 +39505,30 @@ public final class MasterProtos { } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.Builder.class); } private int bitField0_; - public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; - private long expectedTimeout_; + public static final int CLEANER_CHORE_RAN_FIELD_NUMBER = 1; + private boolean cleanerChoreRan_; /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public boolean hasExpectedTimeout() { + public boolean hasCleanerChoreRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public long getExpectedTimeout() { - return expectedTimeout_; + public boolean getCleanerChoreRan() { + return cleanerChoreRan_; } private byte memoizedIsInitialized = -1; @@ -39756,7 +39537,7 @@ public final class MasterProtos { if (isInitialized == 1) return true; if (isInitialized == 0) return false; - if (!hasExpectedTimeout()) { + if (!hasCleanerChoreRan()) { memoizedIsInitialized = 0; return false; } @@ -39767,7 +39548,7 @@ public final class MasterProtos { public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, expectedTimeout_); + output.writeBool(1, cleanerChoreRan_); } unknownFields.writeTo(output); } @@ -39779,7 +39560,7 @@ public final class MasterProtos { size = 0; if (((bitField0_ & 0x00000001) == 
0x00000001)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt64Size(1, expectedTimeout_); + .computeBoolSize(1, cleanerChoreRan_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -39792,16 +39573,16 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) obj; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse) obj; boolean result = true; - result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); - if (hasExpectedTimeout()) { - result = result && (getExpectedTimeout() - == other.getExpectedTimeout()); + result = result && (hasCleanerChoreRan() == other.hasCleanerChoreRan()); + if (hasCleanerChoreRan()) { + result = result && (getCleanerChoreRan() + == other.getCleanerChoreRan()); } result = result && unknownFields.equals(other.unknownFields); return result; @@ -39814,68 +39595,68 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasExpectedTimeout()) { - hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; - hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( - getExpectedTimeout()); + if (hasCleanerChoreRan()) { + hash = (37 * hash) + CLEANER_CHORE_RAN_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( + getCleanerChoreRan()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -39887,7 +39668,7 @@ public final class MasterProtos { public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { @@ -39902,25 +39683,25 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.SnapshotResponse} + * Protobuf type {@code hbase.pb.RunCleanerChoreResponse} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotResponse) - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder { + // @@protoc_insertion_point(builder_implements:hbase.pb.RunCleanerChoreResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponseOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreResponse_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -39937,36 +39718,36 @@ public final class MasterProtos { } public Builder clear() { super.clear(); - expectedTimeout_ = 0L; + cleanerChoreRan_ = false; bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_RunCleanerChoreResponse_descriptor; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse build() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse result = buildPartial(); + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse buildPartial() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse(this); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.expectedTimeout_ = expectedTimeout_; + result.cleanerChoreRan_ = cleanerChoreRan_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -39999,18 +39780,18 @@ public final class MasterProtos { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) { - return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse)other); + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse other) { - if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()) return this; - if (other.hasExpectedTimeout()) { - setExpectedTimeout(other.getExpectedTimeout()); + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance()) return this; + if (other.hasCleanerChoreRan()) { + setCleanerChoreRan(other.getCleanerChoreRan()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -40018,7 +39799,7 @@ public final class MasterProtos { } public final boolean isInitialized() { - if (!hasExpectedTimeout()) { + if (!hasCleanerChoreRan()) { return false; } return true; @@ -40028,11 +39809,11 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parsedMessage = null; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -40043,34 +39824,34 @@ public final class MasterProtos { } private int bitField0_; - private long expectedTimeout_ ; + private boolean cleanerChoreRan_ ; /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public boolean hasExpectedTimeout() { + public boolean hasCleanerChoreRan() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public long getExpectedTimeout() { - return expectedTimeout_; + public boolean getCleanerChoreRan() { + return cleanerChoreRan_; } /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public Builder setExpectedTimeout(long value) { + public Builder setCleanerChoreRan(boolean value) { bitField0_ |= 0x00000001; - expectedTimeout_ = value; + cleanerChoreRan_ = value; onChanged(); return this; } /** - * required int64 expected_timeout = 1; + * required bool cleaner_chore_ran = 1; */ - public Builder clearExpectedTimeout() { + public Builder clearCleanerChoreRan() { bitField0_ = (bitField0_ & ~0x00000001); - expectedTimeout_ = 0L; + cleanerChoreRan_ = false; onChanged(); return this; } @@ -40085,60 +39866,70 @@ public final class MasterProtos { } - // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.RunCleanerChoreResponse) } - // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotResponse) - private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:hbase.pb.RunCleanerChoreResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse(); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstance() { + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse getDefaultInstance() { return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public SnapshotResponse parsePartialFrom( + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public RunCleanerChoreResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new SnapshotResponse(input, extensionRegistry); + return new RunCleanerChoreResponse(input, extensionRegistry); } }; - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser 
parser() { + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface GetCompletedSnapshotsRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:hbase.pb.GetCompletedSnapshotsRequest) + public interface SetCleanerChoreRunningRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetCleanerChoreRunningRequest) org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * required bool on = 1; + */ + boolean hasOn(); + /** + * required bool on = 1; + */ + boolean getOn(); } /** - * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} + * Protobuf type {@code hbase.pb.SetCleanerChoreRunningRequest} */ - public static final class GetCompletedSnapshotsRequest extends + public static final class SetCleanerChoreRunningRequest extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:hbase.pb.GetCompletedSnapshotsRequest) - GetCompletedSnapshotsRequestOrBuilder { - // Use GetCompletedSnapshotsRequest.newBuilder() to construct. - private GetCompletedSnapshotsRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + // @@protoc_insertion_point(message_implements:hbase.pb.SetCleanerChoreRunningRequest) + SetCleanerChoreRunningRequestOrBuilder { + // Use SetCleanerChoreRunningRequest.newBuilder() to construct. 
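[Note: a hedged usage sketch of the new response message and its required cleaner_chore_ran field; accessor names are taken directly from the generated builder above:

org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse resp =
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.newBuilder()
        .setCleanerChoreRan(true)   // required field: build() throws if it is never set
        .build();
boolean ran = resp.getCleanerChoreRan();  // true if the master actually ran the chore
]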
+ private SetCleanerChoreRunningRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private GetCompletedSnapshotsRequest() { + private SetCleanerChoreRunningRequest() { + on_ = false; } @java.lang.Override @@ -40146,11 +39937,12 @@ public final class MasterProtos { getUnknownFields() { return this.unknownFields; } - private GetCompletedSnapshotsRequest( + private SetCleanerChoreRunningRequest( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); + int mutable_bitField0_ = 0; org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -40168,6 +39960,11 @@ public final class MasterProtos { } break; } + case 8: { + bitField0_ |= 0x00000001; + on_ = input.readBool(); + break; + } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { @@ -40182,14 +39979,30 @@ public final class MasterProtos { } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.Builder.class); + } + + private int bitField0_; + public static final int ON_FIELD_NUMBER = 1; + private boolean on_; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; } private byte memoizedIsInitialized = -1; @@ -40198,12 +40011,19 @@ public final class MasterProtos { if (isInitialized == 1) return true; if (isInitialized == 0) return false; + if (!hasOn()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, on_); + } unknownFields.writeTo(output); } @@ -40212,6 +40032,10 @@ public final class MasterProtos { if (size != -1) return size; size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeBoolSize(1, on_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -40223,12 +40047,17 @@ public final class MasterProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) obj; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest) obj; boolean result = true; + result = result && (hasOn() == other.hasOn()); + if (hasOn()) { + result = result && (getOn() + == other.getOn()); + } result = result && unknownFields.equals(other.unknownFields); return result; } @@ -40240,63 +40069,68 @@ public final class MasterProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasOn()) { + hash = (37 * hash) + ON_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( + getOn()); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input) + 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -40308,7 +40142,7 @@ public final class MasterProtos { public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { @@ -40323,25 +40157,25 @@ public final class MasterProtos { return builder; } /** - * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} + * Protobuf type {@code hbase.pb.SetCleanerChoreRunningRequest} */ public static final class Builder extends 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:hbase.pb.GetCompletedSnapshotsRequest) - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder { + // @@protoc_insertion_point(builder_implements:hbase.pb.SetCleanerChoreRunningRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequestOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -40358,28 +40192,2946 @@ public final class MasterProtos { } public Builder clear() { super.clear(); + on_ = false; + bitField0_ = (bitField0_ & ~0x00000001); return this; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest build() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = buildPartial(); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest build() { + 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest buildPartial() { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest(this); + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.on_ = on_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.getDefaultInstance()) return this; + if (other.hasOn()) { + setOn(other.getOn()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasOn()) { + return false; + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + 
parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private boolean on_ ; + /** + * required bool on = 1; + */ + public boolean hasOn() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool on = 1; + */ + public boolean getOn() { + return on_; + } + /** + * required bool on = 1; + */ + public Builder setOn(boolean value) { + bitField0_ |= 0x00000001; + on_ = value; + onChanged(); + return this; + } + /** + * required bool on = 1; + */ + public Builder clearOn() { + bitField0_ = (bitField0_ & ~0x00000001); + on_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetCleanerChoreRunningRequest) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SetCleanerChoreRunningRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public SetCleanerChoreRunningRequest parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new SetCleanerChoreRunningRequest(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SetCleanerChoreRunningResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SetCleanerChoreRunningResponse) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * optional bool prev_value = 1; + */ + boolean hasPrevValue(); + /** + * optional bool prev_value = 1; + */ + boolean getPrevValue(); + } + /** + * Protobuf type {@code hbase.pb.SetCleanerChoreRunningResponse} + */ + public static final class SetCleanerChoreRunningResponse extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SetCleanerChoreRunningResponse) + 
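[Note: sketch of request construction in the style of the RequestConverter helpers this patch also touches; the helper name here is hypothetical, while the message API is from the generated code above:

// Hypothetical RequestConverter-style helper for the new request message.
public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest
    buildSetCleanerChoreRunningRequest(final boolean on) {
  // 'on' is a required field: isInitialized() stays false until setOn() is called.
  return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest
      .newBuilder().setOn(on).build();
}
]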
SetCleanerChoreRunningResponseOrBuilder { + // Use SetCleanerChoreRunningResponse.newBuilder() to construct. + private SetCleanerChoreRunningResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SetCleanerChoreRunningResponse() { + prevValue_ = false; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetCleanerChoreRunningResponse( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + prevValue_ = input.readBool(); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.Builder.class); + } + + private int bitField0_; + public static final int PREV_VALUE_FIELD_NUMBER = 1; + private boolean prevValue_; + /** + * optional bool prev_value = 1; + */ + public boolean hasPrevValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_value = 1; + */ + public boolean getPrevValue() { + return prevValue_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, prevValue_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if 
(((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeBoolSize(1, prevValue_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse) obj; + + boolean result = true; + result = result && (hasPrevValue() == other.hasPrevValue()); + if (hasPrevValue()) { + result = result && (getPrevValue() + == other.getPrevValue()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPrevValue()) { + hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( + getPrevValue()); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SetCleanerChoreRunningResponse} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SetCleanerChoreRunningResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponseOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + prevValue_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.prevValue_ = prevValue_; + result.bitField0_ = 
to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance()) return this; + if (other.hasPrevValue()) { + setPrevValue(other.getPrevValue()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private boolean prevValue_ ; + /** + * optional bool prev_value = 1; + */ + public boolean hasPrevValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bool prev_value = 1; + */ + public boolean getPrevValue() { + return prevValue_; + } + /** + * optional bool prev_value = 1; + */ + public Builder setPrevValue(boolean value) { + bitField0_ |= 0x00000001; + prevValue_ = value; + onChanged(); + return this; + } + /** + * optional bool prev_value = 1; + */ + public Builder clearPrevValue() { + bitField0_ = (bitField0_ & ~0x00000001); + prevValue_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.SetCleanerChoreRunningResponse) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SetCleanerChoreRunningResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public SetCleanerChoreRunningResponse parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new SetCleanerChoreRunningResponse(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetCleanerChoreStateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetCleanerChoreStateRequest) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.GetCleanerChoreStateRequest} + */ + public static final class GetCleanerChoreStateRequest extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetCleanerChoreStateRequest) + GetCleanerChoreStateRequestOrBuilder { + // Use GetCleanerChoreStateRequest.newBuilder() to construct. 
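[Note: unlike the request's required `on` field, prev_value above is optional, so a reader should guard with hasPrevValue(); a sketch assuming a received SetCleanerChoreRunningResponse `resp`:

// Optional field: hasPrevValue() distinguishes "absent" from the default false.
boolean prev = resp.hasPrevValue() && resp.getPrevValue();
]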
+ private GetCleanerChoreStateRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetCleanerChoreStateRequest() { + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetCleanerChoreStateRequest( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.Builder.class); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest) obj; + + boolean result = true; + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + 
@java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.GetCleanerChoreStateRequest} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetCleanerChoreStateRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequestOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest build() { + 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest(this); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.GetCleanerChoreStateRequest)
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.GetCleanerChoreStateRequest)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateRequest>
+        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<GetCleanerChoreStateRequest>() {
+      public GetCleanerChoreStateRequest parsePartialFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+          return new GetCleanerChoreStateRequest(input, extensionRegistry);
+      }
+    };
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateRequest> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateRequest> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface GetCleanerChoreStateResponseOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.GetCleanerChoreStateResponse)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1;</code>
+     */
+    boolean hasCleanerChoreState();
+    /**
+     * <code>required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState getCleanerChoreState();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.GetCleanerChoreStateResponse}
+   */
+  public static final class GetCleanerChoreStateResponse extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.GetCleanerChoreStateResponse)
+      GetCleanerChoreStateResponseOrBuilder {
+    // Use GetCleanerChoreStateResponse.newBuilder() to construct.
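+    // Editor's note, not protoc output: a minimal sketch of how a client might
+    // consume this request/response pair, assuming a MasterService blocking
+    // stub named `master` (hypothetical variable) is already connected:
+    //
+    //   GetCleanerChoreStateResponse resp = master.getCleanerChoreState(
+    //       null, GetCleanerChoreStateRequest.getDefaultInstance());
+    //   boolean fullyOn = resp.getCleanerChoreState()
+    //       == GetCleanerChoreStateResponse.CleanerChoreState.BOTH_HFILE_LOGFILE_CLEANER_ENABLED;
+    //
+    // The request message carries no fields, so the shared default instance
+    // suffices. Note that an unrecognized enum number on the wire is retained
+    // in unknownFields (see mergeVarintField in the parse loop below) rather
+    // than silently dropped.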
+ private GetCleanerChoreStateResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetCleanerChoreStateResponse() { + cleanerChoreState_ = 0; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetCleanerChoreStateResponse( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState value = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + cleanerChoreState_ = rawValue; + } + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.Builder.class); + } + + /** + * Protobuf enum {@code hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState} + */ + public enum CleanerChoreState + implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { + /** + * HFILE_CLEANER_ENABLED = 0; + */ + HFILE_CLEANER_ENABLED(0), + /** + * LOGFILE_CLEANER_ENABLED = 1; + */ + LOGFILE_CLEANER_ENABLED(1), + /** + * BOTH_HFILE_LOGFILE_CLEANER_ENABLED = 2; + */ + BOTH_HFILE_LOGFILE_CLEANER_ENABLED(2), + /** + * BOTH_HFILE_LOGFILE_CLEANER_DISABLED = 3; + */ + BOTH_HFILE_LOGFILE_CLEANER_DISABLED(3), + ; + + /** + * HFILE_CLEANER_ENABLED = 0; + */ + public static final int HFILE_CLEANER_ENABLED_VALUE = 0; + /** + * LOGFILE_CLEANER_ENABLED = 1; + */ + public static final int 
LOGFILE_CLEANER_ENABLED_VALUE = 1; + /** + * BOTH_HFILE_LOGFILE_CLEANER_ENABLED = 2; + */ + public static final int BOTH_HFILE_LOGFILE_CLEANER_ENABLED_VALUE = 2; + /** + * BOTH_HFILE_LOGFILE_CLEANER_DISABLED = 3; + */ + public static final int BOTH_HFILE_LOGFILE_CLEANER_DISABLED_VALUE = 3; + + + public final int getNumber() { + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static CleanerChoreState valueOf(int value) { + return forNumber(value); + } + + public static CleanerChoreState forNumber(int value) { + switch (value) { + case 0: return HFILE_CLEANER_ENABLED; + case 1: return LOGFILE_CLEANER_ENABLED; + case 2: return BOTH_HFILE_LOGFILE_CLEANER_ENABLED; + case 3: return BOTH_HFILE_LOGFILE_CLEANER_DISABLED; + default: return null; + } + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< + CleanerChoreState> internalValueMap = + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap() { + public CleanerChoreState findValueByNumber(int number) { + return CleanerChoreState.forNumber(number); + } + }; + + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final CleanerChoreState[] VALUES = values(); + + public static CleanerChoreState valueOf( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private CleanerChoreState(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState) + } + + private int bitField0_; + public static final int CLEANER_CHORE_STATE_FIELD_NUMBER = 1; + private int cleanerChoreState_; + /** + * required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1; + */ + public boolean hasCleanerChoreState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState getCleanerChoreState() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState result = org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState.valueOf(cleanerChoreState_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState.HFILE_CLEANER_ENABLED : result; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!hasCleanerChoreState()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, cleanerChoreState_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeEnumSize(1, cleanerChoreState_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse) obj; + + boolean result = true; + result = result && (hasCleanerChoreState() == other.hasCleanerChoreState()); + if (hasCleanerChoreState()) { + result = result && cleanerChoreState_ == other.cleanerChoreState_; + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCleanerChoreState()) { + hash = (37 * hash) + CLEANER_CHORE_STATE_FIELD_NUMBER; + hash = (53 * hash) + cleanerChoreState_; + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + byte[] data, + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.GetCleanerChoreStateResponse} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetCleanerChoreStateResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponseOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + cleanerChoreState_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.cleanerChoreState_ = cleanerChoreState_; + result.bitField0_ = to_bitField0_; + 
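+        // Note (annotation, not protoc output): proto2 tracks presence of the
+        // required enum in bitField0_. buildPartial copies the builder's
+        // presence bit and the raw enum number into the immutable message, so
+        // isInitialized() on the result can reject a response whose required
+        // cleaner_chore_state was never set.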
onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance()) return this; + if (other.hasCleanerChoreState()) { + setCleanerChoreState(other.getCleanerChoreState()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasCleanerChoreState()) { + return false; + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int cleanerChoreState_ = 0; + /** + * required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1; + */ + public boolean hasCleanerChoreState() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState getCleanerChoreState() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState.valueOf(cleanerChoreState_);
+        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState.HFILE_CLEANER_ENABLED : result;
+      }
+      /**
+       * <code>required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1;</code>
+       */
+      public Builder setCleanerChoreState(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        cleanerChoreState_ = value.getNumber();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.GetCleanerChoreStateResponse.CleanerChoreState cleaner_chore_state = 1;</code>
+       */
+      public Builder clearCleanerChoreState() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        cleanerChoreState_ = 0;
+        onChanged();
+        return this;
+      }
+      public final Builder setUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.GetCleanerChoreStateResponse)
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.GetCleanerChoreStateResponse)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateResponse>
+        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<GetCleanerChoreStateResponse>() {
+      public GetCleanerChoreStateResponse parsePartialFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+          return new GetCleanerChoreStateResponse(input, extensionRegistry);
+      }
+    };
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateResponse> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<GetCleanerChoreStateResponse> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface SnapshotRequestOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotRequest)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
+     */
+    boolean hasSnapshot();
+    /**
+     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    /**
+     * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
+     */
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); + } + /** + * Protobuf type {@code hbase.pb.SnapshotRequest} + */ + public static final class SnapshotRequest extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotRequest) + SnapshotRequestOrBuilder { + // Use SnapshotRequest.newBuilder() to construct. + private SnapshotRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SnapshotRequest() { + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SnapshotRequest( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = snapshot_.toBuilder(); + } + snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(snapshot_); + snapshot_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); + } + + private int bitField0_; + public static final int SNAPSHOT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_; + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public boolean hasSnapshot() { + return ((bitField0_ & 0x00000001) == 0x00000001); + 
} + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!hasSnapshot()) { + memoizedIsInitialized = 0; + return false; + } + if (!getSnapshot().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, getSnapshot()); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getSnapshot()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) obj; + + boolean result = true; + result = result && (hasSnapshot() == other.hasSnapshot()); + if (hasSnapshot()) { + result = result && getSnapshot() + .equals(other.getSnapshot()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSnapshot()) { + hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; + hash = (53 * hash) + getSnapshot().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SnapshotRequest} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getSnapshotFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (snapshotBuilder_ == null) { + snapshot_ = null; + } else { + snapshotBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotRequest_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (snapshotBuilder_ == null) { + result.snapshot_ = snapshot_; + } else { + result.snapshot_ = snapshotBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + 
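+      // Editor's sketch, not protoc output: the typical construction path
+      // through this builder, assuming a SnapshotDescription `desc`
+      // (hypothetical variable) has already been built elsewhere:
+      //
+      //   SnapshotRequest req = SnapshotRequest.newBuilder()
+      //       .setSnapshot(desc)   // required field; build() throws if unset
+      //       .build();
+      //
+      // build() delegates to buildPartial() above and then enforces
+      // isInitialized(), which requires both the presence bit and an
+      // initialized nested SnapshotDescription.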
+ public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance()) return this; + if (other.hasSnapshot()) { + mergeSnapshot(other.getSnapshot()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasSnapshot()) { + return false; + } + if (!getSnapshot().isInitialized()) { + return false; + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null; + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_; + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public boolean hasSnapshot() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() { + if 
(snapshotBuilder_ == null) { + return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; + } else { + return snapshotBuilder_.getMessage(); + } + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { + if (snapshotBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + snapshot_ = value; + onChanged(); + } else { + snapshotBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public Builder setSnapshot( + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) { + if (snapshotBuilder_ == null) { + snapshot_ = builderForValue.build(); + onChanged(); + } else { + snapshotBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) { + if (snapshotBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + snapshot_ != null && + snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) { + snapshot_ = + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial(); + } else { + snapshot_ = value; + } + onChanged(); + } else { + snapshotBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public Builder clearSnapshot() { + if (snapshotBuilder_ == null) { + snapshot_ = null; + onChanged(); + } else { + snapshotBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getSnapshotFieldBuilder().getBuilder(); + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() { + if (snapshotBuilder_ != null) { + return snapshotBuilder_.getMessageOrBuilder(); + } else { + return snapshot_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_; + } + } + /** + * required .hbase.pb.SnapshotDescription snapshot = 1; + */ + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + getSnapshotFieldBuilder() { + if (snapshotBuilder_ == null) { + snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>( + getSnapshot(), + getParentForChildren(), + isClean()); + snapshot_ = null; + } + return snapshotBuilder_; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotRequest) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public SnapshotRequest parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotRequest(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SnapshotResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotResponse) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * required int64 expected_timeout = 1; + */ + boolean hasExpectedTimeout(); + /** + * required int64 expected_timeout = 1; + */ + long getExpectedTimeout(); + } + /** + * Protobuf type {@code hbase.pb.SnapshotResponse} + */ + public static final class SnapshotResponse extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:hbase.pb.SnapshotResponse) + SnapshotResponseOrBuilder { + // Use SnapshotResponse.newBuilder() to construct. + private SnapshotResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SnapshotResponse() { + expectedTimeout_ = 0L; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SnapshotResponse( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + expectedTimeout_ = input.readInt64(); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); + } + + private int bitField0_; + public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1; + private long expectedTimeout_; + /** + * required int64 expected_timeout = 1; + */ + public boolean hasExpectedTimeout() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 expected_timeout = 1; + */ + public long getExpectedTimeout() { + return expectedTimeout_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!hasExpectedTimeout()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, expectedTimeout_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int 
size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeInt64Size(1, expectedTimeout_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) obj; + + boolean result = true; + result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); + if (hasExpectedTimeout()) { + result = result && (getExpectedTimeout() + == other.getExpectedTimeout()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExpectedTimeout()) { + hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( + getExpectedTimeout()); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.SnapshotResponse} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + expectedTimeout_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_SnapshotResponse_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.expectedTimeout_ = expectedTimeout_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()) return this; + if (other.hasExpectedTimeout()) { + setExpectedTimeout(other.getExpectedTimeout()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasExpectedTimeout()) { + return false; + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private long expectedTimeout_ ; + /** + * required int64 expected_timeout = 1; + */ + public boolean hasExpectedTimeout() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 expected_timeout = 1; + */ + public long getExpectedTimeout() { + return expectedTimeout_; + } + /** + * required int64 expected_timeout = 1; + */ + public Builder setExpectedTimeout(long value) { + bitField0_ |= 0x00000001; + expectedTimeout_ = value; + onChanged(); + return this; + } + /** + * required int64 expected_timeout = 1; + */ + public Builder clearExpectedTimeout() { + bitField0_ = (bitField0_ & ~0x00000001); + expectedTimeout_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotResponse) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public SnapshotResponse parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new SnapshotResponse(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetCompletedSnapshotsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.GetCompletedSnapshotsRequest) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} + */ + public static final class GetCompletedSnapshotsRequest extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.GetCompletedSnapshotsRequest) + GetCompletedSnapshotsRequestOrBuilder { + // Use GetCompletedSnapshotsRequest.newBuilder() to construct. 
+ private GetCompletedSnapshotsRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetCompletedSnapshotsRequest() { + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetCompletedSnapshotsRequest( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) obj; + + boolean result = true; + result = result && unknownFields.equals(other.unknownFields); + return result; + } 
+ + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.GetCompletedSnapshotsRequest} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.GetCompletedSnapshotsRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest build() { + 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest(this); onBuilt(); return result; } @@ -67875,6 +70627,44 @@ public final class MasterProtos { /** *
+       ** Get a run of the CleanerChore
+       * 
+ * + * rpc RunCleanerChore(.hbase.pb.RunCleanerChoreRequest) returns (.hbase.pb.RunCleanerChoreResponse); + */ + public abstract void runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
+       **
+       * Turn the CleanerChore on or off.
+       * 
+ * + * rpc SetCleanerChoreRunning(.hbase.pb.SetCleanerChoreRunningRequest) returns (.hbase.pb.SetCleanerChoreRunningResponse); + */ + public abstract void setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
+       **
+       * Query the state of the CleanerChore.
+       * 
+ * + * rpc GetCleanerChoreState(.hbase.pb.GetCleanerChoreStateRequest) returns (.hbase.pb.GetCleanerChoreStateResponse); + */ + public abstract void getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
        **
        * Call a master coprocessor endpoint
        * 
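As a point of reference while reading these generated stubs, here is a minimal client-side sketch that drives the three new RPCs through the Admin methods this patch adds. It is an illustrative sketch, not part of the patch: the connection boilerplate is standard HBase client usage, and it assumes an hbase-site.xml on the classpath pointing at a running cluster.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CleanerChoreState;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CleanerChoreExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      // Disable the chore; the return value is the previous running state.
      boolean wasRunning = admin.setCleanerChoreRunning(false);
      // Query the combined HFile/log cleaner state.
      CleanerChoreState state = admin.getCleanerChoreState();
      System.out.println("was running: " + wasRunning + ", state: " + state);
      // Re-enable the chore and ask for an immediate run.
      admin.setCleanerChoreRunning(true);
      boolean ran = admin.runCleanerChore();
      System.out.println("cleaner chore ran: " + ran);
    }
  }
}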
@@ -68557,6 +71347,30 @@ public final class MasterProtos { } @java.lang.Override + public void runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + impl.runCleanerChore(controller, request, done); + } + + @java.lang.Override + public void setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + impl.setCleanerChoreRunning(controller, request, done); + } + + @java.lang.Override + public void getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + impl.getCleanerChoreState(controller, request, done); + } + + @java.lang.Override public void execMasterService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, @@ -68917,72 +71731,78 @@ public final class MasterProtos { case 32: return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request); case 33: - return impl.execMasterService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + return impl.runCleanerChore(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest)request); case 34: - return impl.snapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)request); + return impl.setCleanerChoreRunning(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest)request); case 35: - return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request); + return impl.getCleanerChoreState(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest)request); case 36: - return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request); + return impl.execMasterService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 37: - return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request); + return impl.snapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)request); case 38: - return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request); + return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request); case 39: - return impl.execProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request); + return impl.deleteSnapshot(controller, 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request); case 40: - return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request); + return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request); case 41: - return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request); + return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request); case 42: - return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request); + return impl.execProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request); case 43: - return impl.createNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest)request); + return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request); case 44: - return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request); + return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request); case 45: - return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request); + return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request); case 46: - return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request); + return impl.createNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest)request); case 47: - return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request); + return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request); case 48: - return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request); + return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request); case 49: - return impl.getTableState(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest)request); + return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request); case 50: - return impl.setQuota(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest)request); + return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request); case 51: - return impl.getLastMajorCompactionTimestamp(controller, 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request); + return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request); case 52: - return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request); + return impl.getTableState(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest)request); case 53: - return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest)request); + return impl.setQuota(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest)request); case 54: - return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request); + return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request); case 55: - return impl.abortProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest)request); + return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request); case 56: - return impl.listProcedures(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest)request); + return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest)request); case 57: - return impl.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request); + return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request); case 58: - return impl.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request); + return impl.abortProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest)request); case 59: - return impl.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request); + return impl.listProcedures(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest)request); case 60: - return impl.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request); + return impl.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request); case 61: - return impl.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request); + return impl.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request); case 62: - return impl.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request); + 
return impl.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request); case 63: - return impl.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request); + return impl.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request); case 64: - return impl.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request); + return impl.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request); case 65: - return impl.drainRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request); + return impl.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request); case 66: + return impl.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request); + case 67: + return impl.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request); + case 68: + return impl.drainRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request); + case 69: return impl.removeDrainFromRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest)request); default: throw new java.lang.AssertionError("Can't get here."); @@ -69065,72 +71885,78 @@ public final class MasterProtos { case 32: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 33: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.getDefaultInstance(); case 34: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.getDefaultInstance(); case 35: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.getDefaultInstance(); case 36: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 37: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); case 38: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 39: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); case 40: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); case 41: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); case 42: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 43: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 44: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); case 45: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); case 46: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); case 47: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); case 48: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); case 49: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); case 50: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 51: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); case 52: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance(); case 53: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance(); case 54: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance(); case 55: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance(); case 56: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance(); case 57: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance(); case 58: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance(); case 59: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); case 66: + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + case 67: + return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + case 68: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + case 69: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -69213,72 +72039,78 @@ public final class MasterProtos { case 32: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 33: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance(); case 34: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance(); case 35: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance(); case 36: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 37: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); case 38: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 39: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); case 40: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); case 41: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); case 42: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 43: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 44: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); + return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); case 45: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); case 46: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); case 47: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); case 48: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); case 49: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); case 50: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 51: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); case 52: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(); case 53: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(); case 54: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); case 55: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); case 56: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(); case 57: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(); case 58: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(); case 59: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); case 66: + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + case 67: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + case 68: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + case 69: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -69709,6 +72541,44 @@ public final class MasterProtos { /** *
+     ** Request a run of the CleanerChore
+     * 
+ * + * rpc RunCleanerChore(.hbase.pb.RunCleanerChoreRequest) returns (.hbase.pb.RunCleanerChoreResponse); + */ + public abstract void runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
+     **
+     * Turn the CleanerChore on or off.
+     * 
+ * + * rpc SetCleanerChoreRunning(.hbase.pb.SetCleanerChoreRunningRequest) returns (.hbase.pb.SetCleanerChoreRunningResponse); + */ + public abstract void setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
+     **
+     * Query the state of the CleanerChore.
+     * 
+ * + * rpc GetCleanerChoreState(.hbase.pb.GetCleanerChoreStateRequest) returns (.hbase.pb.GetCleanerChoreStateResponse); + */ + public abstract void getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** + *
      **
      * Call a master coprocessor endpoint
      * 
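[Reviewer note: the three cleaner-chore RPCs above land at service-method indices 33-35, which is why every later stub call in this file bumps its getDescriptor().getMethods().get(n) index by exactly three. A minimal client-side sketch of driving the new calls through the generated blocking stub follows; it assumes the enclosing generated service is MasterService with the usual BlockingInterface, and the `master` stub variable, its connection setup, and the null controller are illustrative assumptions, not part of this patch.

  // Hypothetical helper, not part of the patch: exercise the new
  // cleaner-chore RPCs against a master blocking stub obtained elsewhere
  // (connection setup omitted). Signatures match the blocking interface
  // declared later in this file.
  static void exerciseCleanerChore(
      org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface master)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
    // Method index 33: ask the master to fire the cleaner chore once.
    // A real caller supplies its own RpcController instead of null.
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse runRsp =
        master.runCleanerChore(null,
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.getDefaultInstance());
    // Method index 35: read back the current cleaner chore state.
    org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse stateRsp =
        master.getCleanerChoreState(null,
            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.getDefaultInstance());
  }
]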
@@ -70309,171 +73179,186 @@ public final class MasterProtos { done)); return; case 33: + this.runCleanerChore(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest)request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 34: + this.setCleanerChoreRunning(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest)request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 35: + this.getCleanerChoreState(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest)request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 36: this.execMasterService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 34: + case 37: this.snapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 35: + case 38: this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 36: + case 39: this.deleteSnapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 37: + case 40: this.isSnapshotDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 38: + case 41: this.restoreSnapshot(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 39: + case 42: this.execProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 40: + case 43: this.execProcedureWithRet(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 41: + case 44: this.isProcedureDone(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 42: + case 45: this.modifyNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 43: + case 46: this.createNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 44: + case 47: 
this.deleteNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 45: + case 48: this.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 46: + case 49: this.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 47: + case 50: this.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 48: + case 51: this.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 49: + case 52: this.getTableState(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 50: + case 53: this.setQuota(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 51: + case 54: this.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 52: + case 55: this.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 53: + case 56: this.getProcedureResult(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 54: + case 57: this.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 55: + case 58: this.abortProcedure(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 56: + case 59: this.listProcedures(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 57: + case 60: this.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 58: + case 61: this.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 59: + case 62: this.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 60: + case 63: this.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 61: + case 64: this.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 62: + case 65: this.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 63: + case 66: this.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 64: + case 67: this.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 65: + case 68: this.drainRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 66: + case 69: this.removeDrainFromRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); @@ -70559,72 +73444,78 @@ public final class MasterProtos { case 32: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance(); case 33: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest.getDefaultInstance(); case 34: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest.getDefaultInstance(); case 35: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest.getDefaultInstance(); case 36: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); + return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 37: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance(); case 38: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 39: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance(); case 40: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance(); case 41: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance(); case 42: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 43: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance(); case 44: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance(); case 45: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance(); case 46: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance(); case 47: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance(); case 48: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance(); case 49: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance(); case 50: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance(); + return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 51: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); case 52: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest.getDefaultInstance(); case 53: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance(); case 54: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance(); case 55: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance(); case 56: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance(); case 57: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance(); case 58: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance(); case 59: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); case 64: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); case 66: + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + case 67: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + case 68: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + case 69: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -70707,72 +73598,78 @@ public final class MasterProtos { case 32: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(); case 33: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance(); case 34: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance(); case 35: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance(); case 36: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 37: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(); case 38: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 39: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(); case 40: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(); case 41: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(); case 42: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 43: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(); case 44: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(); case 45: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(); case 46: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(); case 47: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(); case 48: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(); case 49: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(); case 50: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 51: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); case 52: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(); case 53: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(); case 54: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); case 55: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(); case 56: - return 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(); case 57: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(); case 58: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(); case 59: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); case 66: + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + case 67: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + case 68: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + case 69: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -71290,12 +74187,57 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance())); } + public void runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(33), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance(), + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.class, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance())); + } + + public void setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(34), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance(), + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.class, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance())); + } + + public void getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(35), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance(), + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.class, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance())); + } + public void execMasterService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(33), + getDescriptor().getMethods().get(36), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), @@ -71310,7 +74252,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(34), + getDescriptor().getMethods().get(37), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(), @@ -71325,7 +74267,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(35), + 
getDescriptor().getMethods().get(38), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(), @@ -71340,7 +74282,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(36), + getDescriptor().getMethods().get(39), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(), @@ -71355,7 +74297,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(37), + getDescriptor().getMethods().get(40), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(), @@ -71370,7 +74312,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(38), + getDescriptor().getMethods().get(41), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(), @@ -71385,7 +74327,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(39), + getDescriptor().getMethods().get(42), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(), @@ -71400,7 +74342,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(40), + getDescriptor().getMethods().get(43), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(), @@ -71415,7 +74357,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(41), + getDescriptor().getMethods().get(44), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(), @@ -71430,7 +74372,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(42), + getDescriptor().getMethods().get(45), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(), @@ -71445,7 +74387,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest request, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(43), + getDescriptor().getMethods().get(46), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(), @@ -71460,7 +74402,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(44), + getDescriptor().getMethods().get(47), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(), @@ -71475,7 +74417,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(45), + getDescriptor().getMethods().get(48), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(), @@ -71490,7 +74432,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(46), + getDescriptor().getMethods().get(49), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(), @@ -71505,7 +74447,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(47), + getDescriptor().getMethods().get(50), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(), @@ -71520,7 +74462,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(48), + getDescriptor().getMethods().get(51), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(), @@ -71535,7 +74477,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(49), + getDescriptor().getMethods().get(52), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance(), @@ -71550,7 +74492,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(50), + getDescriptor().getMethods().get(53), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(), @@ 
-71565,7 +74507,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(51), + getDescriptor().getMethods().get(54), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(), @@ -71580,7 +74522,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(52), + getDescriptor().getMethods().get(55), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(), @@ -71595,7 +74537,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(53), + getDescriptor().getMethods().get(56), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(), @@ -71610,7 +74552,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(54), + getDescriptor().getMethods().get(57), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(), @@ -71625,7 +74567,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(55), + getDescriptor().getMethods().get(58), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(), @@ -71640,7 +74582,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(56), + getDescriptor().getMethods().get(59), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(), @@ -71655,7 +74597,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(57), + getDescriptor().getMethods().get(60), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(), @@ -71670,7 +74612,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(58), + 
getDescriptor().getMethods().get(61), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(), @@ -71685,7 +74627,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(59), + getDescriptor().getMethods().get(62), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(), @@ -71700,7 +74642,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(60), + getDescriptor().getMethods().get(63), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(), @@ -71715,7 +74657,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(61), + getDescriptor().getMethods().get(64), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(), @@ -71730,7 +74672,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(62), + getDescriptor().getMethods().get(65), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(), @@ -71745,7 +74687,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(63), + getDescriptor().getMethods().get(66), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(), @@ -71760,7 +74702,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(64), + getDescriptor().getMethods().get(67), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(), @@ -71775,7 +74717,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(65), + getDescriptor().getMethods().get(68), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(), @@ -71790,7 +74732,7 @@ public final class 
MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(66), + getDescriptor().getMethods().get(69), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(), @@ -71972,6 +74914,21 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) @@ -72546,12 +75503,48 @@ public final class MasterProtos { } + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse runCleanerChore( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(33), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse setCleanerChoreRunning( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(34), + controller, + request, + 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse getCleanerChoreState( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(35), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.getDefaultInstance()); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(33), + getDescriptor().getMethods().get(36), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); @@ -72563,7 +75556,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(34), + getDescriptor().getMethods().get(37), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()); @@ -72575,7 +75568,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(35), + getDescriptor().getMethods().get(38), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance()); @@ -72587,7 +75580,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(36), + getDescriptor().getMethods().get(39), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()); @@ -72599,7 +75592,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) 
channel.callBlockingMethod( - getDescriptor().getMethods().get(37), + getDescriptor().getMethods().get(40), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance()); @@ -72611,7 +75604,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(38), + getDescriptor().getMethods().get(41), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance()); @@ -72623,7 +75616,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(39), + getDescriptor().getMethods().get(42), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()); @@ -72635,7 +75628,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(40), + getDescriptor().getMethods().get(43), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()); @@ -72647,7 +75640,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(41), + getDescriptor().getMethods().get(44), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance()); @@ -72659,7 +75652,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(42), + getDescriptor().getMethods().get(45), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance()); @@ -72671,7 +75664,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(43), + getDescriptor().getMethods().get(46), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance()); @@ -72683,7 +75676,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(44), + getDescriptor().getMethods().get(47), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance()); @@ -72695,7 +75688,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(45), + getDescriptor().getMethods().get(48), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance()); @@ -72707,7 +75700,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(46), + getDescriptor().getMethods().get(49), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance()); @@ -72719,7 +75712,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(47), + getDescriptor().getMethods().get(50), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance()); @@ -72731,7 +75724,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(48), + getDescriptor().getMethods().get(51), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()); @@ -72743,7 +75736,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(49), + getDescriptor().getMethods().get(52), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse.getDefaultInstance()); @@ -72755,7 +75748,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(50), + getDescriptor().getMethods().get(53), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance()); @@ -72767,7 +75760,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(51), + getDescriptor().getMethods().get(54), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance()); @@ -72779,7 +75772,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(52), + getDescriptor().getMethods().get(55), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance()); @@ -72791,7 +75784,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(53), + getDescriptor().getMethods().get(56), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance()); @@ -72803,7 +75796,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(54), + getDescriptor().getMethods().get(57), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance()); @@ -72815,7 +75808,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(55), + getDescriptor().getMethods().get(58), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance()); @@ -72827,7 +75820,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(56), + getDescriptor().getMethods().get(59), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()); @@ -72839,7 +75832,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(57), + getDescriptor().getMethods().get(60), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance()); @@ -72851,7 +75844,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(58), + getDescriptor().getMethods().get(61), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance()); @@ -72863,7 +75856,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(59), + getDescriptor().getMethods().get(62), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance()); @@ -72875,7 +75868,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(60), + getDescriptor().getMethods().get(63), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance()); @@ -72887,7 +75880,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(61), + getDescriptor().getMethods().get(64), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance()); @@ -72899,7 +75892,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(62), + getDescriptor().getMethods().get(65), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance()); @@ -72911,7 +75904,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(63), + getDescriptor().getMethods().get(66), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance()); @@ -72923,7 +75916,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(64), + getDescriptor().getMethods().get(67), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance()); @@ -72935,7 +75928,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(65), + getDescriptor().getMethods().get(68), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance()); @@ -72947,7 +75940,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(66), + getDescriptor().getMethods().get(69), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance()); @@ -73309,6 +76302,36 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_RunCleanerChoreRequest_descriptor; + private static final + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_RunCleanerChoreRequest_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_RunCleanerChoreResponse_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_RunCleanerChoreResponse_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_SetCleanerChoreRunningRequest_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_SetCleanerChoreRunningResponse_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_GetCleanerChoreStateRequest_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_GetCleanerChoreStateResponse_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SnapshotRequest_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable @@ -73667,256 +76690,275 @@ public final class MasterProtos { "gJanitorResponse\022\022\n\nprev_value\030\001 \001(\010\" \n\036" + "IsCatalogJanitorEnabledRequest\"0\n\037IsCata" + "logJanitorEnabledResponse\022\r\n\005value\030\001 \002(\010" + - "\"B\n\017SnapshotRequest\022/\n\010snapshot\030\001 \002(\0132\035." 
+ - "hbase.pb.SnapshotDescription\",\n\020Snapshot" + - "Response\022\030\n\020expected_timeout\030\001 \002(\003\"\036\n\034Ge" + - "tCompletedSnapshotsRequest\"Q\n\035GetComplet" + - "edSnapshotsResponse\0220\n\tsnapshots\030\001 \003(\0132\035", - ".hbase.pb.SnapshotDescription\"H\n\025DeleteS" + - "napshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase" + - ".pb.SnapshotDescription\"\030\n\026DeleteSnapsho" + - "tResponse\"s\n\026RestoreSnapshotRequest\022/\n\010s" + - "napshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescrip" + - "tion\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 " + - "\001(\004:\0010\"*\n\027RestoreSnapshotResponse\022\017\n\007pro" + - "c_id\030\001 \002(\004\"H\n\025IsSnapshotDoneRequest\022/\n\010s" + - "napshot\030\001 \001(\0132\035.hbase.pb.SnapshotDescrip" + - "tion\"^\n\026IsSnapshotDoneResponse\022\023\n\004done\030\001", - " \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hbase.pb" + - ".SnapshotDescription\"O\n\034IsRestoreSnapsho" + - "tDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.hbase.p" + - "b.SnapshotDescription\"4\n\035IsRestoreSnapsh" + - "otDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\"F\n\033G" + - "etSchemaAlterStatusRequest\022\'\n\ntable_name" + - "\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034GetSchema" + - "AlterStatusResponse\022\035\n\025yet_to_update_reg" + - "ions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n\032Ge" + - "tTableDescriptorsRequest\022(\n\013table_names\030", - "\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005regex\030\002 \001(" + - "\t\022!\n\022include_sys_tables\030\003 \001(\010:\005false\022\021\n\t" + - "namespace\030\004 \001(\t\"J\n\033GetTableDescriptorsRe" + - "sponse\022+\n\014table_schema\030\001 \003(\0132\025.hbase.pb." 
+ - "TableSchema\"[\n\024GetTableNamesRequest\022\r\n\005r" + - "egex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(\010:\005" + - "false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTableName" + - "sResponse\022(\n\013table_names\030\001 \003(\0132\023.hbase.p" + - "b.TableName\"?\n\024GetTableStateRequest\022\'\n\nt" + - "able_name\030\001 \002(\0132\023.hbase.pb.TableName\"B\n\025", - "GetTableStateResponse\022)\n\013table_state\030\001 \002" + - "(\0132\024.hbase.pb.TableState\"\031\n\027GetClusterSt" + - "atusRequest\"K\n\030GetClusterStatusResponse\022" + - "/\n\016cluster_status\030\001 \002(\0132\027.hbase.pb.Clust" + - "erStatus\"\030\n\026IsMasterRunningRequest\"4\n\027Is" + - "MasterRunningResponse\022\031\n\021is_master_runni" + - "ng\030\001 \002(\010\"I\n\024ExecProcedureRequest\0221\n\tproc" + - "edure\030\001 \002(\0132\036.hbase.pb.ProcedureDescript" + - "ion\"F\n\025ExecProcedureResponse\022\030\n\020expected" + - "_timeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026I", - "sProcedureDoneRequest\0221\n\tprocedure\030\001 \001(\013" + - "2\036.hbase.pb.ProcedureDescription\"`\n\027IsPr" + - "ocedureDoneResponse\022\023\n\004done\030\001 \001(\010:\005false" + - "\0220\n\010snapshot\030\002 \001(\0132\036.hbase.pb.ProcedureD" + - "escription\",\n\031GetProcedureResultRequest\022" + - "\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedureResultR" + - "esponse\0229\n\005state\030\001 \002(\0162*.hbase.pb.GetPro" + - "cedureResultResponse.State\022\022\n\nstart_time" + - "\030\002 \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 " + - "\001(\014\0224\n\texception\030\005 \001(\0132!.hbase.pb.Foreig", - "nExceptionMessage\"1\n\005State\022\r\n\tNOT_FOUND\020" + - "\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortPro" + - "cedureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInt" + - "erruptIfRunning\030\002 \001(\010:\004true\"6\n\026AbortProc" + - "edureResponse\022\034\n\024is_procedure_aborted\030\001 " + - "\002(\010\"\027\n\025ListProceduresRequest\"@\n\026ListProc" + - "eduresResponse\022&\n\tprocedure\030\001 \003(\0132\023.hbas" + - "e.pb.Procedure\"\315\001\n\017SetQuotaRequest\022\021\n\tus" + - "er_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnam" + - "espace\030\003 \001(\t\022\'\n\ntable_name\030\004 \001(\0132\023.hbase", - ".pb.TableName\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016byp" + - "ass_globals\030\006 \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hb" + - "ase.pb.ThrottleRequest\"\022\n\020SetQuotaRespon" + - "se\"J\n\037MajorCompactionTimestampRequest\022\'\n" + - "\ntable_name\030\001 \002(\0132\023.hbase.pb.TableName\"U" + - "\n(MajorCompactionTimestampForRegionReque" + - "st\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpec" + - "ifier\"@\n MajorCompactionTimestampRespons" + - "e\022\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n\033Secur" + - "ityCapabilitiesRequest\"\354\001\n\034SecurityCapab", - "ilitiesResponse\022G\n\014capabilities\030\001 \003(\01621." 
+ - "hbase.pb.SecurityCapabilitiesResponse.Ca" + - "pability\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHEN" + - "TICATION\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n" + - "\rAUTHORIZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003" + - "\022\023\n\017CELL_VISIBILITY\020\004\"\"\n ListDrainingReg" + - "ionServersRequest\"N\n!ListDrainingRegionS" + - "erversResponse\022)\n\013server_name\030\001 \003(\0132\024.hb" + - "ase.pb.ServerName\"F\n\031DrainRegionServersR" + - "equest\022)\n\013server_name\030\001 \003(\0132\024.hbase.pb.S", - "erverName\"\034\n\032DrainRegionServersResponse\"" + - "P\n#RemoveDrainFromRegionServersRequest\022)" + - "\n\013server_name\030\001 \003(\0132\024.hbase.pb.ServerNam" + - "e\"&\n$RemoveDrainFromRegionServersRespons" + - "e*(\n\020MasterSwitchType\022\t\n\005SPLIT\020\000\022\t\n\005MERG" + - "E\020\0012\2221\n\rMasterService\022e\n\024GetSchemaAlterS" + - "tatus\022%.hbase.pb.GetSchemaAlterStatusReq" + - "uest\032&.hbase.pb.GetSchemaAlterStatusResp" + - "onse\022b\n\023GetTableDescriptors\022$.hbase.pb.G" + - "etTableDescriptorsRequest\032%.hbase.pb.Get", - "TableDescriptorsResponse\022P\n\rGetTableName" + - "s\022\036.hbase.pb.GetTableNamesRequest\032\037.hbas" + - "e.pb.GetTableNamesResponse\022Y\n\020GetCluster" + - "Status\022!.hbase.pb.GetClusterStatusReques" + - "t\032\".hbase.pb.GetClusterStatusResponse\022V\n" + - "\017IsMasterRunning\022 .hbase.pb.IsMasterRunn" + - "ingRequest\032!.hbase.pb.IsMasterRunningRes" + - "ponse\022D\n\tAddColumn\022\032.hbase.pb.AddColumnR" + - "equest\032\033.hbase.pb.AddColumnResponse\022M\n\014D" + - "eleteColumn\022\035.hbase.pb.DeleteColumnReque", - "st\032\036.hbase.pb.DeleteColumnResponse\022M\n\014Mo" + - "difyColumn\022\035.hbase.pb.ModifyColumnReques" + - "t\032\036.hbase.pb.ModifyColumnResponse\022G\n\nMov" + - "eRegion\022\033.hbase.pb.MoveRegionRequest\032\034.h" + - "base.pb.MoveRegionResponse\022\\\n\021MergeTable" + - "Regions\022\".hbase.pb.MergeTableRegionsRequ" + - "est\032#.hbase.pb.MergeTableRegionsResponse" + - "\022M\n\014AssignRegion\022\035.hbase.pb.AssignRegion" + - "Request\032\036.hbase.pb.AssignRegionResponse\022" + - "S\n\016UnassignRegion\022\037.hbase.pb.UnassignReg", - "ionRequest\032 .hbase.pb.UnassignRegionResp" + - "onse\022P\n\rOfflineRegion\022\036.hbase.pb.Offline" + - "RegionRequest\032\037.hbase.pb.OfflineRegionRe" + - "sponse\022J\n\013DeleteTable\022\034.hbase.pb.DeleteT" + - "ableRequest\032\035.hbase.pb.DeleteTableRespon" + - "se\022P\n\rtruncateTable\022\036.hbase.pb.TruncateT" + - "ableRequest\032\037.hbase.pb.TruncateTableResp" + - "onse\022J\n\013EnableTable\022\034.hbase.pb.EnableTab" + - "leRequest\032\035.hbase.pb.EnableTableResponse" + - "\022M\n\014DisableTable\022\035.hbase.pb.DisableTable", - "Request\032\036.hbase.pb.DisableTableResponse\022" + - "J\n\013ModifyTable\022\034.hbase.pb.ModifyTableReq" + - "uest\032\035.hbase.pb.ModifyTableResponse\022J\n\013C" + - "reateTable\022\034.hbase.pb.CreateTableRequest" + - "\032\035.hbase.pb.CreateTableResponse\022A\n\010Shutd" + - "own\022\031.hbase.pb.ShutdownRequest\032\032.hbase.p" + - "b.ShutdownResponse\022G\n\nStopMaster\022\033.hbase" + - ".pb.StopMasterRequest\032\034.hbase.pb.StopMas" + - "terResponse\022h\n\031IsMasterInMaintenanceMode" + - "\022$.hbase.pb.IsInMaintenanceModeRequest\032%", - ".hbase.pb.IsInMaintenanceModeResponse\022>\n" + - "\007Balance\022\030.hbase.pb.BalanceRequest\032\031.hba" + - "se.pb.BalanceResponse\022_\n\022SetBalancerRunn" + - 
"ing\022#.hbase.pb.SetBalancerRunningRequest" + - "\032$.hbase.pb.SetBalancerRunningResponse\022\\" + - "\n\021IsBalancerEnabled\022\".hbase.pb.IsBalance" + - "rEnabledRequest\032#.hbase.pb.IsBalancerEna" + - "bledResponse\022k\n\026SetSplitOrMergeEnabled\022\'" + - ".hbase.pb.SetSplitOrMergeEnabledRequest\032" + - "(.hbase.pb.SetSplitOrMergeEnabledRespons", - "e\022h\n\025IsSplitOrMergeEnabled\022&.hbase.pb.Is" + - "SplitOrMergeEnabledRequest\032\'.hbase.pb.Is" + - "SplitOrMergeEnabledResponse\022D\n\tNormalize" + - "\022\032.hbase.pb.NormalizeRequest\032\033.hbase.pb." + - "NormalizeResponse\022e\n\024SetNormalizerRunnin" + - "g\022%.hbase.pb.SetNormalizerRunningRequest" + - "\032&.hbase.pb.SetNormalizerRunningResponse" + - "\022b\n\023IsNormalizerEnabled\022$.hbase.pb.IsNor" + - "malizerEnabledRequest\032%.hbase.pb.IsNorma" + - "lizerEnabledResponse\022S\n\016RunCatalogScan\022\037", - ".hbase.pb.RunCatalogScanRequest\032 .hbase." + - "pb.RunCatalogScanResponse\022e\n\024EnableCatal" + - "ogJanitor\022%.hbase.pb.EnableCatalogJanito" + - "rRequest\032&.hbase.pb.EnableCatalogJanitor" + - "Response\022n\n\027IsCatalogJanitorEnabled\022(.hb" + - "ase.pb.IsCatalogJanitorEnabledRequest\032)." + - "hbase.pb.IsCatalogJanitorEnabledResponse" + - "\022^\n\021ExecMasterService\022#.hbase.pb.Coproce" + - "ssorServiceRequest\032$.hbase.pb.Coprocesso" + - "rServiceResponse\022A\n\010Snapshot\022\031.hbase.pb.", - "SnapshotRequest\032\032.hbase.pb.SnapshotRespo" + - "nse\022h\n\025GetCompletedSnapshots\022&.hbase.pb." + - "GetCompletedSnapshotsRequest\032\'.hbase.pb." + - "GetCompletedSnapshotsResponse\022S\n\016DeleteS" + - "napshot\022\037.hbase.pb.DeleteSnapshotRequest" + - "\032 .hbase.pb.DeleteSnapshotResponse\022S\n\016Is" + - "SnapshotDone\022\037.hbase.pb.IsSnapshotDoneRe" + - "quest\032 .hbase.pb.IsSnapshotDoneResponse\022" + - "V\n\017RestoreSnapshot\022 .hbase.pb.RestoreSna" + - "pshotRequest\032!.hbase.pb.RestoreSnapshotR", - "esponse\022P\n\rExecProcedure\022\036.hbase.pb.Exec" + - "ProcedureRequest\032\037.hbase.pb.ExecProcedur" + - "eResponse\022W\n\024ExecProcedureWithRet\022\036.hbas" + - "e.pb.ExecProcedureRequest\032\037.hbase.pb.Exe" + - "cProcedureResponse\022V\n\017IsProcedureDone\022 ." + - "hbase.pb.IsProcedureDoneRequest\032!.hbase." + - "pb.IsProcedureDoneResponse\022V\n\017ModifyName" + - "space\022 .hbase.pb.ModifyNamespaceRequest\032" + - "!.hbase.pb.ModifyNamespaceResponse\022V\n\017Cr" + - "eateNamespace\022 .hbase.pb.CreateNamespace", - "Request\032!.hbase.pb.CreateNamespaceRespon" + - "se\022V\n\017DeleteNamespace\022 .hbase.pb.DeleteN" + - "amespaceRequest\032!.hbase.pb.DeleteNamespa" + - "ceResponse\022k\n\026GetNamespaceDescriptor\022\'.h" + - "base.pb.GetNamespaceDescriptorRequest\032(." 
+ - "hbase.pb.GetNamespaceDescriptorResponse\022" + - "q\n\030ListNamespaceDescriptors\022).hbase.pb.L" + - "istNamespaceDescriptorsRequest\032*.hbase.p" + - "b.ListNamespaceDescriptorsResponse\022\206\001\n\037L" + - "istTableDescriptorsByNamespace\0220.hbase.p", - "b.ListTableDescriptorsByNamespaceRequest" + - "\0321.hbase.pb.ListTableDescriptorsByNamesp" + - "aceResponse\022t\n\031ListTableNamesByNamespace" + - "\022*.hbase.pb.ListTableNamesByNamespaceReq" + - "uest\032+.hbase.pb.ListTableNamesByNamespac" + - "eResponse\022P\n\rGetTableState\022\036.hbase.pb.Ge" + - "tTableStateRequest\032\037.hbase.pb.GetTableSt" + - "ateResponse\022A\n\010SetQuota\022\031.hbase.pb.SetQu" + - "otaRequest\032\032.hbase.pb.SetQuotaResponse\022x" + - "\n\037getLastMajorCompactionTimestamp\022).hbas", - "e.pb.MajorCompactionTimestampRequest\032*.h" + - "base.pb.MajorCompactionTimestampResponse" + - "\022\212\001\n(getLastMajorCompactionTimestampForR" + - "egion\0222.hbase.pb.MajorCompactionTimestam" + - "pForRegionRequest\032*.hbase.pb.MajorCompac" + - "tionTimestampResponse\022_\n\022getProcedureRes" + - "ult\022#.hbase.pb.GetProcedureResultRequest" + - "\032$.hbase.pb.GetProcedureResultResponse\022h" + - "\n\027getSecurityCapabilities\022%.hbase.pb.Sec" + - "urityCapabilitiesRequest\032&.hbase.pb.Secu", - "rityCapabilitiesResponse\022S\n\016AbortProcedu" + - "re\022\037.hbase.pb.AbortProcedureRequest\032 .hb" + - "ase.pb.AbortProcedureResponse\022S\n\016ListPro" + - "cedures\022\037.hbase.pb.ListProceduresRequest" + - "\032 .hbase.pb.ListProceduresResponse\022_\n\022Ad" + - "dReplicationPeer\022#.hbase.pb.AddReplicati" + - "onPeerRequest\032$.hbase.pb.AddReplicationP" + - "eerResponse\022h\n\025RemoveReplicationPeer\022&.h" + - "base.pb.RemoveReplicationPeerRequest\032\'.h" + - "base.pb.RemoveReplicationPeerResponse\022h\n", - "\025EnableReplicationPeer\022&.hbase.pb.Enable" + - "ReplicationPeerRequest\032\'.hbase.pb.Enable" + - "ReplicationPeerResponse\022k\n\026DisableReplic" + - "ationPeer\022\'.hbase.pb.DisableReplicationP" + - "eerRequest\032(.hbase.pb.DisableReplication" + - "PeerResponse\022q\n\030GetReplicationPeerConfig" + - "\022).hbase.pb.GetReplicationPeerConfigRequ" + - "est\032*.hbase.pb.GetReplicationPeerConfigR" + - "esponse\022z\n\033UpdateReplicationPeerConfig\022," + - ".hbase.pb.UpdateReplicationPeerConfigReq", - "uest\032-.hbase.pb.UpdateReplicationPeerCon" + - "figResponse\022e\n\024ListReplicationPeers\022%.hb" + - "ase.pb.ListReplicationPeersRequest\032&.hba" + - "se.pb.ListReplicationPeersResponse\022t\n\031li" + - "stDrainingRegionServers\022*.hbase.pb.ListD" + - "rainingRegionServersRequest\032+.hbase.pb.L" + - "istDrainingRegionServersResponse\022_\n\022drai" + - "nRegionServers\022#.hbase.pb.DrainRegionSer" + - "versRequest\032$.hbase.pb.DrainRegionServer" + - "sResponse\022}\n\034removeDrainFromRegionServer", - "s\022-.hbase.pb.RemoveDrainFromRegionServer" + - "sRequest\032..hbase.pb.RemoveDrainFromRegio" + - "nServersResponseBI\n1org.apache.hadoop.hb" + - "ase.shaded.protobuf.generatedB\014MasterPro" + - "tosH\001\210\001\001\240\001\001" + "\"\030\n\026RunCleanerChoreRequest\"4\n\027RunCleaner" + + "ChoreResponse\022\031\n\021cleaner_chore_ran\030\001 \002(\010" + + "\"+\n\035SetCleanerChoreRunningRequest\022\n\n\002on\030" + + "\001 \002(\010\"4\n\036SetCleanerChoreRunningResponse\022" + + "\022\n\nprev_value\030\001 \001(\010\"\035\n\033GetCleanerChoreSt", + "ateRequest\"\224\002\n\034GetCleanerChoreStateRespo" + + "nse\022U\n\023cleaner_chore_state\030\001 
\002(\01628.hbase" + + ".pb.GetCleanerChoreStateResponse.Cleaner" + + "ChoreState\"\234\001\n\021CleanerChoreState\022\031\n\025HFIL" + + "E_CLEANER_ENABLED\020\000\022\033\n\027LOGFILE_CLEANER_E" + + "NABLED\020\001\022&\n\"BOTH_HFILE_LOGFILE_CLEANER_E" + + "NABLED\020\002\022\'\n#BOTH_HFILE_LOGFILE_CLEANER_D" + + "ISABLED\020\003\"B\n\017SnapshotRequest\022/\n\010snapshot" + + "\030\001 \002(\0132\035.hbase.pb.SnapshotDescription\",\n" + + "\020SnapshotResponse\022\030\n\020expected_timeout\030\001 ", + "\002(\003\"\036\n\034GetCompletedSnapshotsRequest\"Q\n\035G" + + "etCompletedSnapshotsResponse\0220\n\tsnapshot" + + "s\030\001 \003(\0132\035.hbase.pb.SnapshotDescription\"H" + + "\n\025DeleteSnapshotRequest\022/\n\010snapshot\030\001 \002(" + + "\0132\035.hbase.pb.SnapshotDescription\"\030\n\026Dele" + + "teSnapshotResponse\"s\n\026RestoreSnapshotReq" + + "uest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Snapsh" + + "otDescription\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n" + + "\005nonce\030\003 \001(\004:\0010\"*\n\027RestoreSnapshotRespon" + + "se\022\017\n\007proc_id\030\001 \002(\004\"H\n\025IsSnapshotDoneReq", + "uest\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.Snapsh" + + "otDescription\"^\n\026IsSnapshotDoneResponse\022" + + "\023\n\004done\030\001 \001(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035" + + ".hbase.pb.SnapshotDescription\"O\n\034IsResto" + + "reSnapshotDoneRequest\022/\n\010snapshot\030\001 \001(\0132" + + "\035.hbase.pb.SnapshotDescription\"4\n\035IsRest" + + "oreSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005f" + + "alse\"F\n\033GetSchemaAlterStatusRequest\022\'\n\nt" + + "able_name\030\001 \002(\0132\023.hbase.pb.TableName\"T\n\034" + + "GetSchemaAlterStatusResponse\022\035\n\025yet_to_u", + "pdate_regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001" + + "(\r\"\213\001\n\032GetTableDescriptorsRequest\022(\n\013tab" + + "le_names\030\001 \003(\0132\023.hbase.pb.TableName\022\r\n\005r" + + "egex\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(\010:\005" + + "false\022\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTableDesc" + + "riptorsResponse\022+\n\014table_schema\030\001 \003(\0132\025." 
+ + "hbase.pb.TableSchema\"[\n\024GetTableNamesReq" + + "uest\022\r\n\005regex\030\001 \001(\t\022!\n\022include_sys_table" + + "s\030\002 \001(\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"A\n\025Get" + + "TableNamesResponse\022(\n\013table_names\030\001 \003(\0132", + "\023.hbase.pb.TableName\"?\n\024GetTableStateReq" + + "uest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Tabl" + + "eName\"B\n\025GetTableStateResponse\022)\n\013table_" + + "state\030\001 \002(\0132\024.hbase.pb.TableState\"\031\n\027Get" + + "ClusterStatusRequest\"K\n\030GetClusterStatus" + + "Response\022/\n\016cluster_status\030\001 \002(\0132\027.hbase" + + ".pb.ClusterStatus\"\030\n\026IsMasterRunningRequ" + + "est\"4\n\027IsMasterRunningResponse\022\031\n\021is_mas" + + "ter_running\030\001 \002(\010\"I\n\024ExecProcedureReques" + + "t\0221\n\tprocedure\030\001 \002(\0132\036.hbase.pb.Procedur", + "eDescription\"F\n\025ExecProcedureResponse\022\030\n" + + "\020expected_timeout\030\001 \001(\003\022\023\n\013return_data\030\002" + + " \001(\014\"K\n\026IsProcedureDoneRequest\0221\n\tproced" + + "ure\030\001 \001(\0132\036.hbase.pb.ProcedureDescriptio" + + "n\"`\n\027IsProcedureDoneResponse\022\023\n\004done\030\001 \001" + + "(\010:\005false\0220\n\010snapshot\030\002 \001(\0132\036.hbase.pb.P" + + "rocedureDescription\",\n\031GetProcedureResul" + + "tRequest\022\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedu" + + "reResultResponse\0229\n\005state\030\001 \002(\0162*.hbase." + + "pb.GetProcedureResultResponse.State\022\022\n\ns", + "tart_time\030\002 \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006" + + "result\030\004 \001(\014\0224\n\texception\030\005 \001(\0132!.hbase." + + "pb.ForeignExceptionMessage\"1\n\005State\022\r\n\tN" + + "OT_FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n" + + "\025AbortProcedureRequest\022\017\n\007proc_id\030\001 \002(\004\022" + + "#\n\025mayInterruptIfRunning\030\002 \001(\010:\004true\"6\n\026" + + "AbortProcedureResponse\022\034\n\024is_procedure_a" + + "borted\030\001 \002(\010\"\027\n\025ListProceduresRequest\"@\n" + + "\026ListProceduresResponse\022&\n\tprocedure\030\001 \003" + + "(\0132\023.hbase.pb.Procedure\"\315\001\n\017SetQuotaRequ", + "est\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001" + + "(\t\022\021\n\tnamespace\030\003 \001(\t\022\'\n\ntable_name\030\004 \001(" + + "\0132\023.hbase.pb.TableName\022\022\n\nremove_all\030\005 \001" + + "(\010\022\026\n\016bypass_globals\030\006 \001(\010\022+\n\010throttle\030\007" + + " \001(\0132\031.hbase.pb.ThrottleRequest\"\022\n\020SetQu" + + "otaResponse\"J\n\037MajorCompactionTimestampR" + + "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" + + "bleName\"U\n(MajorCompactionTimestampForRe" + + "gionRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.R" + + "egionSpecifier\"@\n MajorCompactionTimesta", + "mpResponse\022\034\n\024compaction_timestamp\030\001 \002(\003" + + "\"\035\n\033SecurityCapabilitiesRequest\"\354\001\n\034Secu" + + "rityCapabilitiesResponse\022G\n\014capabilities" + + "\030\001 \003(\01621.hbase.pb.SecurityCapabilitiesRe" + + "sponse.Capability\"\202\001\n\nCapability\022\031\n\025SIMP" + + "LE_AUTHENTICATION\020\000\022\031\n\025SECURE_AUTHENTICA" + + "TION\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022CELL_AUTHOR" + + "IZATION\020\003\022\023\n\017CELL_VISIBILITY\020\004\"\"\n ListDr" + + "ainingRegionServersRequest\"N\n!ListDraini" + + 
"ngRegionServersResponse\022)\n\013server_name\030\001", + " \003(\0132\024.hbase.pb.ServerName\"F\n\031DrainRegio" + + "nServersRequest\022)\n\013server_name\030\001 \003(\0132\024.h" + + "base.pb.ServerName\"\034\n\032DrainRegionServers" + + "Response\"P\n#RemoveDrainFromRegionServers" + + "Request\022)\n\013server_name\030\001 \003(\0132\024.hbase.pb." + + "ServerName\"&\n$RemoveDrainFromRegionServe" + + "rsResponse*(\n\020MasterSwitchType\022\t\n\005SPLIT\020" + + "\000\022\t\n\005MERGE\020\0012\2763\n\rMasterService\022e\n\024GetSch" + + "emaAlterStatus\022%.hbase.pb.GetSchemaAlter" + + "StatusRequest\032&.hbase.pb.GetSchemaAlterS", + "tatusResponse\022b\n\023GetTableDescriptors\022$.h" + + "base.pb.GetTableDescriptorsRequest\032%.hba" + + "se.pb.GetTableDescriptorsResponse\022P\n\rGet" + + "TableNames\022\036.hbase.pb.GetTableNamesReque" + + "st\032\037.hbase.pb.GetTableNamesResponse\022Y\n\020G" + + "etClusterStatus\022!.hbase.pb.GetClusterSta" + + "tusRequest\032\".hbase.pb.GetClusterStatusRe" + + "sponse\022V\n\017IsMasterRunning\022 .hbase.pb.IsM" + + "asterRunningRequest\032!.hbase.pb.IsMasterR" + + "unningResponse\022D\n\tAddColumn\022\032.hbase.pb.A", + "ddColumnRequest\032\033.hbase.pb.AddColumnResp" + + "onse\022M\n\014DeleteColumn\022\035.hbase.pb.DeleteCo" + + "lumnRequest\032\036.hbase.pb.DeleteColumnRespo" + + "nse\022M\n\014ModifyColumn\022\035.hbase.pb.ModifyCol" + + "umnRequest\032\036.hbase.pb.ModifyColumnRespon" + + "se\022G\n\nMoveRegion\022\033.hbase.pb.MoveRegionRe" + + "quest\032\034.hbase.pb.MoveRegionResponse\022\\\n\021M" + + "ergeTableRegions\022\".hbase.pb.MergeTableRe" + + "gionsRequest\032#.hbase.pb.MergeTableRegion" + + "sResponse\022M\n\014AssignRegion\022\035.hbase.pb.Ass", + "ignRegionRequest\032\036.hbase.pb.AssignRegion" + + "Response\022S\n\016UnassignRegion\022\037.hbase.pb.Un" + + "assignRegionRequest\032 .hbase.pb.UnassignR" + + "egionResponse\022P\n\rOfflineRegion\022\036.hbase.p" + + "b.OfflineRegionRequest\032\037.hbase.pb.Offlin" + + "eRegionResponse\022J\n\013DeleteTable\022\034.hbase.p" + + "b.DeleteTableRequest\032\035.hbase.pb.DeleteTa" + + "bleResponse\022P\n\rtruncateTable\022\036.hbase.pb." + + "TruncateTableRequest\032\037.hbase.pb.Truncate" + + "TableResponse\022J\n\013EnableTable\022\034.hbase.pb.", + "EnableTableRequest\032\035.hbase.pb.EnableTabl" + + "eResponse\022M\n\014DisableTable\022\035.hbase.pb.Dis" + + "ableTableRequest\032\036.hbase.pb.DisableTable" + + "Response\022J\n\013ModifyTable\022\034.hbase.pb.Modif" + + "yTableRequest\032\035.hbase.pb.ModifyTableResp" + + "onse\022J\n\013CreateTable\022\034.hbase.pb.CreateTab" + + "leRequest\032\035.hbase.pb.CreateTableResponse" + + "\022A\n\010Shutdown\022\031.hbase.pb.ShutdownRequest\032" + + "\032.hbase.pb.ShutdownResponse\022G\n\nStopMaste" + + "r\022\033.hbase.pb.StopMasterRequest\032\034.hbase.p", + "b.StopMasterResponse\022h\n\031IsMasterInMainte" + + "nanceMode\022$.hbase.pb.IsInMaintenanceMode" + + "Request\032%.hbase.pb.IsInMaintenanceModeRe" + + "sponse\022>\n\007Balance\022\030.hbase.pb.BalanceRequ" + + "est\032\031.hbase.pb.BalanceResponse\022_\n\022SetBal" + + "ancerRunning\022#.hbase.pb.SetBalancerRunni" + + "ngRequest\032$.hbase.pb.SetBalancerRunningR" + + "esponse\022\\\n\021IsBalancerEnabled\022\".hbase.pb." 
+ + "IsBalancerEnabledRequest\032#.hbase.pb.IsBa" + + "lancerEnabledResponse\022k\n\026SetSplitOrMerge", + "Enabled\022\'.hbase.pb.SetSplitOrMergeEnable" + + "dRequest\032(.hbase.pb.SetSplitOrMergeEnabl" + + "edResponse\022h\n\025IsSplitOrMergeEnabled\022&.hb" + + "ase.pb.IsSplitOrMergeEnabledRequest\032\'.hb" + + "ase.pb.IsSplitOrMergeEnabledResponse\022D\n\t" + + "Normalize\022\032.hbase.pb.NormalizeRequest\032\033." + + "hbase.pb.NormalizeResponse\022e\n\024SetNormali" + + "zerRunning\022%.hbase.pb.SetNormalizerRunni" + + "ngRequest\032&.hbase.pb.SetNormalizerRunnin" + + "gResponse\022b\n\023IsNormalizerEnabled\022$.hbase", + ".pb.IsNormalizerEnabledRequest\032%.hbase.p" + + "b.IsNormalizerEnabledResponse\022S\n\016RunCata" + + "logScan\022\037.hbase.pb.RunCatalogScanRequest" + + "\032 .hbase.pb.RunCatalogScanResponse\022e\n\024En" + + "ableCatalogJanitor\022%.hbase.pb.EnableCata" + + "logJanitorRequest\032&.hbase.pb.EnableCatal" + + "ogJanitorResponse\022n\n\027IsCatalogJanitorEna" + + "bled\022(.hbase.pb.IsCatalogJanitorEnabledR" + + "equest\032).hbase.pb.IsCatalogJanitorEnable" + + "dResponse\022V\n\017RunCleanerChore\022 .hbase.pb.", + "RunCleanerChoreRequest\032!.hbase.pb.RunCle" + + "anerChoreResponse\022k\n\026SetCleanerChoreRunn" + + "ing\022\'.hbase.pb.SetCleanerChoreRunningReq" + + "uest\032(.hbase.pb.SetCleanerChoreRunningRe" + + "sponse\022e\n\024GetCleanerChoreState\022%.hbase.p" + + "b.GetCleanerChoreStateRequest\032&.hbase.pb" + + ".GetCleanerChoreStateResponse\022^\n\021ExecMas" + + "terService\022#.hbase.pb.CoprocessorService" + + "Request\032$.hbase.pb.CoprocessorServiceRes" + + "ponse\022A\n\010Snapshot\022\031.hbase.pb.SnapshotReq", + "uest\032\032.hbase.pb.SnapshotResponse\022h\n\025GetC" + + "ompletedSnapshots\022&.hbase.pb.GetComplete" + + "dSnapshotsRequest\032\'.hbase.pb.GetComplete" + + "dSnapshotsResponse\022S\n\016DeleteSnapshot\022\037.h" + + "base.pb.DeleteSnapshotRequest\032 .hbase.pb" + + ".DeleteSnapshotResponse\022S\n\016IsSnapshotDon" + + "e\022\037.hbase.pb.IsSnapshotDoneRequest\032 .hba" + + "se.pb.IsSnapshotDoneResponse\022V\n\017RestoreS" + + "napshot\022 .hbase.pb.RestoreSnapshotReques" + + "t\032!.hbase.pb.RestoreSnapshotResponse\022P\n\r", + "ExecProcedure\022\036.hbase.pb.ExecProcedureRe" + + "quest\032\037.hbase.pb.ExecProcedureResponse\022W" + + "\n\024ExecProcedureWithRet\022\036.hbase.pb.ExecPr" + + "ocedureRequest\032\037.hbase.pb.ExecProcedureR" + + "esponse\022V\n\017IsProcedureDone\022 .hbase.pb.Is" + + "ProcedureDoneRequest\032!.hbase.pb.IsProced" + + "ureDoneResponse\022V\n\017ModifyNamespace\022 .hba" + + "se.pb.ModifyNamespaceRequest\032!.hbase.pb." 
+ + "ModifyNamespaceResponse\022V\n\017CreateNamespa" + + "ce\022 .hbase.pb.CreateNamespaceRequest\032!.h", + "base.pb.CreateNamespaceResponse\022V\n\017Delet" + + "eNamespace\022 .hbase.pb.DeleteNamespaceReq" + + "uest\032!.hbase.pb.DeleteNamespaceResponse\022" + + "k\n\026GetNamespaceDescriptor\022\'.hbase.pb.Get" + + "NamespaceDescriptorRequest\032(.hbase.pb.Ge" + + "tNamespaceDescriptorResponse\022q\n\030ListName" + + "spaceDescriptors\022).hbase.pb.ListNamespac" + + "eDescriptorsRequest\032*.hbase.pb.ListNames" + + "paceDescriptorsResponse\022\206\001\n\037ListTableDes" + + "criptorsByNamespace\0220.hbase.pb.ListTable", + "DescriptorsByNamespaceRequest\0321.hbase.pb" + + ".ListTableDescriptorsByNamespaceResponse" + + "\022t\n\031ListTableNamesByNamespace\022*.hbase.pb" + + ".ListTableNamesByNamespaceRequest\032+.hbas" + + "e.pb.ListTableNamesByNamespaceResponse\022P" + + "\n\rGetTableState\022\036.hbase.pb.GetTableState" + + "Request\032\037.hbase.pb.GetTableStateResponse" + + "\022A\n\010SetQuota\022\031.hbase.pb.SetQuotaRequest\032" + + "\032.hbase.pb.SetQuotaResponse\022x\n\037getLastMa" + + "jorCompactionTimestamp\022).hbase.pb.MajorC", + "ompactionTimestampRequest\032*.hbase.pb.Maj" + + "orCompactionTimestampResponse\022\212\001\n(getLas" + + "tMajorCompactionTimestampForRegion\0222.hba" + + "se.pb.MajorCompactionTimestampForRegionR" + + "equest\032*.hbase.pb.MajorCompactionTimesta" + + "mpResponse\022_\n\022getProcedureResult\022#.hbase" + + ".pb.GetProcedureResultRequest\032$.hbase.pb" + + ".GetProcedureResultResponse\022h\n\027getSecuri" + + "tyCapabilities\022%.hbase.pb.SecurityCapabi" + + "litiesRequest\032&.hbase.pb.SecurityCapabil", + "itiesResponse\022S\n\016AbortProcedure\022\037.hbase." + + "pb.AbortProcedureRequest\032 .hbase.pb.Abor" + + "tProcedureResponse\022S\n\016ListProcedures\022\037.h" + + "base.pb.ListProceduresRequest\032 .hbase.pb" + + ".ListProceduresResponse\022_\n\022AddReplicatio" + + "nPeer\022#.hbase.pb.AddReplicationPeerReque" + + "st\032$.hbase.pb.AddReplicationPeerResponse" + + "\022h\n\025RemoveReplicationPeer\022&.hbase.pb.Rem" + + "oveReplicationPeerRequest\032\'.hbase.pb.Rem" + + "oveReplicationPeerResponse\022h\n\025EnableRepl", + "icationPeer\022&.hbase.pb.EnableReplication" + + "PeerRequest\032\'.hbase.pb.EnableReplication" + + "PeerResponse\022k\n\026DisableReplicationPeer\022\'" + + ".hbase.pb.DisableReplicationPeerRequest\032" + + "(.hbase.pb.DisableReplicationPeerRespons" + + "e\022q\n\030GetReplicationPeerConfig\022).hbase.pb" + + ".GetReplicationPeerConfigRequest\032*.hbase" + + ".pb.GetReplicationPeerConfigResponse\022z\n\033" + + "UpdateReplicationPeerConfig\022,.hbase.pb.U" + + "pdateReplicationPeerConfigRequest\032-.hbas", + "e.pb.UpdateReplicationPeerConfigResponse" + + "\022e\n\024ListReplicationPeers\022%.hbase.pb.List" + + "ReplicationPeersRequest\032&.hbase.pb.ListR" + + "eplicationPeersResponse\022t\n\031listDrainingR" + + "egionServers\022*.hbase.pb.ListDrainingRegi" + + "onServersRequest\032+.hbase.pb.ListDraining" + + "RegionServersResponse\022_\n\022drainRegionServ" + + "ers\022#.hbase.pb.DrainRegionServersRequest" + + "\032$.hbase.pb.DrainRegionServersResponse\022}" + + "\n\034removeDrainFromRegionServers\022-.hbase.p", + "b.RemoveDrainFromRegionServersRequest\032.." + + "hbase.pb.RemoveDrainFromRegionServersRes" + + "ponseBI\n1org.apache.hadoop.hbase.shaded." 
+ + "protobuf.generatedB\014MasterProtosH\001\210\001\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -74357,284 +77399,320 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_descriptor, new java.lang.String[] { "Value", }); - internal_static_hbase_pb_SnapshotRequest_descriptor = + internal_static_hbase_pb_RunCleanerChoreRequest_descriptor = getDescriptor().getMessageTypes().get(70); + internal_static_hbase_pb_RunCleanerChoreRequest_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RunCleanerChoreRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RunCleanerChoreResponse_descriptor = + getDescriptor().getMessageTypes().get(71); + internal_static_hbase_pb_RunCleanerChoreResponse_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_RunCleanerChoreResponse_descriptor, + new java.lang.String[] { "CleanerChoreRan", }); + internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor = + getDescriptor().getMessageTypes().get(72); + internal_static_hbase_pb_SetCleanerChoreRunningRequest_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor, + new java.lang.String[] { "On", }); + internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor = + getDescriptor().getMessageTypes().get(73); + internal_static_hbase_pb_SetCleanerChoreRunningResponse_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor, + new java.lang.String[] { "PrevValue", }); + internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor = + getDescriptor().getMessageTypes().get(74); + internal_static_hbase_pb_GetCleanerChoreStateRequest_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetCleanerChoreStateRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor = + getDescriptor().getMessageTypes().get(75); + internal_static_hbase_pb_GetCleanerChoreStateResponse_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_GetCleanerChoreStateResponse_descriptor, + new java.lang.String[] { "CleanerChoreState", }); + internal_static_hbase_pb_SnapshotRequest_descriptor = + getDescriptor().getMessageTypes().get(76); internal_static_hbase_pb_SnapshotRequest_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_hbase_pb_SnapshotRequest_descriptor, new java.lang.String[] { "Snapshot", }); internal_static_hbase_pb_SnapshotResponse_descriptor = - getDescriptor().getMessageTypes().get(71); + getDescriptor().getMessageTypes().get(77); internal_static_hbase_pb_SnapshotResponse_fieldAccessorTable = new 
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_SnapshotResponse_descriptor,
        new java.lang.String[] { "ExpectedTimeout", });
     internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor =
-      getDescriptor().getMessageTypes().get(72);
+      getDescriptor().getMessageTypes().get(78);
     internal_static_hbase_pb_GetCompletedSnapshotsRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetCompletedSnapshotsRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor =
-      getDescriptor().getMessageTypes().get(73);
+      getDescriptor().getMessageTypes().get(79);
     internal_static_hbase_pb_GetCompletedSnapshotsResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetCompletedSnapshotsResponse_descriptor,
        new java.lang.String[] { "Snapshots", });
     internal_static_hbase_pb_DeleteSnapshotRequest_descriptor =
-      getDescriptor().getMessageTypes().get(74);
+      getDescriptor().getMessageTypes().get(80);
     internal_static_hbase_pb_DeleteSnapshotRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_DeleteSnapshotRequest_descriptor,
        new java.lang.String[] { "Snapshot", });
     internal_static_hbase_pb_DeleteSnapshotResponse_descriptor =
-      getDescriptor().getMessageTypes().get(75);
+      getDescriptor().getMessageTypes().get(81);
     internal_static_hbase_pb_DeleteSnapshotResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_DeleteSnapshotResponse_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_RestoreSnapshotRequest_descriptor =
-      getDescriptor().getMessageTypes().get(76);
+      getDescriptor().getMessageTypes().get(82);
     internal_static_hbase_pb_RestoreSnapshotRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_RestoreSnapshotRequest_descriptor,
        new java.lang.String[] { "Snapshot", "NonceGroup", "Nonce", });
     internal_static_hbase_pb_RestoreSnapshotResponse_descriptor =
-      getDescriptor().getMessageTypes().get(77);
+      getDescriptor().getMessageTypes().get(83);
     internal_static_hbase_pb_RestoreSnapshotResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_RestoreSnapshotResponse_descriptor,
        new java.lang.String[] { "ProcId", });
     internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor =
-      getDescriptor().getMessageTypes().get(78);
+      getDescriptor().getMessageTypes().get(84);
     internal_static_hbase_pb_IsSnapshotDoneRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsSnapshotDoneRequest_descriptor,
        new java.lang.String[] { "Snapshot", });
     internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor =
-      getDescriptor().getMessageTypes().get(79);
+      getDescriptor().getMessageTypes().get(85);
     internal_static_hbase_pb_IsSnapshotDoneResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsSnapshotDoneResponse_descriptor,
        new java.lang.String[] { "Done", "Snapshot", });
     internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor =
-      getDescriptor().getMessageTypes().get(80);
+      getDescriptor().getMessageTypes().get(86);
     internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsRestoreSnapshotDoneRequest_descriptor,
        new java.lang.String[] { "Snapshot", });
     internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor =
-      getDescriptor().getMessageTypes().get(81);
+      getDescriptor().getMessageTypes().get(87);
     internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsRestoreSnapshotDoneResponse_descriptor,
        new java.lang.String[] { "Done", });
     internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor =
-      getDescriptor().getMessageTypes().get(82);
+      getDescriptor().getMessageTypes().get(88);
     internal_static_hbase_pb_GetSchemaAlterStatusRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetSchemaAlterStatusRequest_descriptor,
        new java.lang.String[] { "TableName", });
     internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor =
-      getDescriptor().getMessageTypes().get(83);
+      getDescriptor().getMessageTypes().get(89);
     internal_static_hbase_pb_GetSchemaAlterStatusResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetSchemaAlterStatusResponse_descriptor,
        new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", });
     internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor =
-      getDescriptor().getMessageTypes().get(84);
+      getDescriptor().getMessageTypes().get(90);
     internal_static_hbase_pb_GetTableDescriptorsRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableDescriptorsRequest_descriptor,
        new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", });
     internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor =
-      getDescriptor().getMessageTypes().get(85);
+      getDescriptor().getMessageTypes().get(91);
     internal_static_hbase_pb_GetTableDescriptorsResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableDescriptorsResponse_descriptor,
        new java.lang.String[] { "TableSchema", });
     internal_static_hbase_pb_GetTableNamesRequest_descriptor =
-      getDescriptor().getMessageTypes().get(86);
+      getDescriptor().getMessageTypes().get(92);
     internal_static_hbase_pb_GetTableNamesRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableNamesRequest_descriptor,
        new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", });
     internal_static_hbase_pb_GetTableNamesResponse_descriptor =
-      getDescriptor().getMessageTypes().get(87);
+      getDescriptor().getMessageTypes().get(93);
     internal_static_hbase_pb_GetTableNamesResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableNamesResponse_descriptor,
        new java.lang.String[] { "TableNames", });
     internal_static_hbase_pb_GetTableStateRequest_descriptor =
-      getDescriptor().getMessageTypes().get(88);
+      getDescriptor().getMessageTypes().get(94);
     internal_static_hbase_pb_GetTableStateRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableStateRequest_descriptor,
        new java.lang.String[] { "TableName", });
     internal_static_hbase_pb_GetTableStateResponse_descriptor =
-      getDescriptor().getMessageTypes().get(89);
+      getDescriptor().getMessageTypes().get(95);
     internal_static_hbase_pb_GetTableStateResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetTableStateResponse_descriptor,
        new java.lang.String[] { "TableState", });
     internal_static_hbase_pb_GetClusterStatusRequest_descriptor =
-      getDescriptor().getMessageTypes().get(90);
+      getDescriptor().getMessageTypes().get(96);
     internal_static_hbase_pb_GetClusterStatusRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetClusterStatusRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_GetClusterStatusResponse_descriptor =
-      getDescriptor().getMessageTypes().get(91);
+      getDescriptor().getMessageTypes().get(97);
     internal_static_hbase_pb_GetClusterStatusResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetClusterStatusResponse_descriptor,
        new java.lang.String[] { "ClusterStatus", });
     internal_static_hbase_pb_IsMasterRunningRequest_descriptor =
-      getDescriptor().getMessageTypes().get(92);
+      getDescriptor().getMessageTypes().get(98);
     internal_static_hbase_pb_IsMasterRunningRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsMasterRunningRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_IsMasterRunningResponse_descriptor =
-      getDescriptor().getMessageTypes().get(93);
+      getDescriptor().getMessageTypes().get(99);
     internal_static_hbase_pb_IsMasterRunningResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsMasterRunningResponse_descriptor,
        new java.lang.String[] { "IsMasterRunning", });
     internal_static_hbase_pb_ExecProcedureRequest_descriptor =
-      getDescriptor().getMessageTypes().get(94);
+      getDescriptor().getMessageTypes().get(100);
     internal_static_hbase_pb_ExecProcedureRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ExecProcedureRequest_descriptor,
        new java.lang.String[] { "Procedure", });
     internal_static_hbase_pb_ExecProcedureResponse_descriptor =
-      getDescriptor().getMessageTypes().get(95);
+      getDescriptor().getMessageTypes().get(101);
     internal_static_hbase_pb_ExecProcedureResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ExecProcedureResponse_descriptor,
        new java.lang.String[] { "ExpectedTimeout", "ReturnData", });
     internal_static_hbase_pb_IsProcedureDoneRequest_descriptor =
-      getDescriptor().getMessageTypes().get(96);
+      getDescriptor().getMessageTypes().get(102);
     internal_static_hbase_pb_IsProcedureDoneRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsProcedureDoneRequest_descriptor,
        new java.lang.String[] { "Procedure", });
     internal_static_hbase_pb_IsProcedureDoneResponse_descriptor =
-      getDescriptor().getMessageTypes().get(97);
+      getDescriptor().getMessageTypes().get(103);
     internal_static_hbase_pb_IsProcedureDoneResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_IsProcedureDoneResponse_descriptor,
        new java.lang.String[] { "Done", "Snapshot", });
     internal_static_hbase_pb_GetProcedureResultRequest_descriptor =
-      getDescriptor().getMessageTypes().get(98);
+      getDescriptor().getMessageTypes().get(104);
     internal_static_hbase_pb_GetProcedureResultRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetProcedureResultRequest_descriptor,
        new java.lang.String[] { "ProcId", });
     internal_static_hbase_pb_GetProcedureResultResponse_descriptor =
-      getDescriptor().getMessageTypes().get(99);
+      getDescriptor().getMessageTypes().get(105);
     internal_static_hbase_pb_GetProcedureResultResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_GetProcedureResultResponse_descriptor,
        new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", });
     internal_static_hbase_pb_AbortProcedureRequest_descriptor =
-      getDescriptor().getMessageTypes().get(100);
+      getDescriptor().getMessageTypes().get(106);
     internal_static_hbase_pb_AbortProcedureRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_AbortProcedureRequest_descriptor,
        new java.lang.String[] { "ProcId", "MayInterruptIfRunning", });
     internal_static_hbase_pb_AbortProcedureResponse_descriptor =
-      getDescriptor().getMessageTypes().get(101);
+      getDescriptor().getMessageTypes().get(107);
     internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_AbortProcedureResponse_descriptor,
        new java.lang.String[] { "IsProcedureAborted", });
     internal_static_hbase_pb_ListProceduresRequest_descriptor =
-      getDescriptor().getMessageTypes().get(102);
+      getDescriptor().getMessageTypes().get(108);
     internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ListProceduresRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_ListProceduresResponse_descriptor =
-      getDescriptor().getMessageTypes().get(103);
+      getDescriptor().getMessageTypes().get(109);
     internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ListProceduresResponse_descriptor,
        new java.lang.String[] { "Procedure", });
     internal_static_hbase_pb_SetQuotaRequest_descriptor =
-      getDescriptor().getMessageTypes().get(104);
+      getDescriptor().getMessageTypes().get(110);
     internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_SetQuotaRequest_descriptor,
        new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", });
     internal_static_hbase_pb_SetQuotaResponse_descriptor =
-      getDescriptor().getMessageTypes().get(105);
+      getDescriptor().getMessageTypes().get(111);
     internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_SetQuotaResponse_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor =
-      getDescriptor().getMessageTypes().get(106);
+      getDescriptor().getMessageTypes().get(112);
     internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor,
        new java.lang.String[] { "TableName", });
     internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor =
-      getDescriptor().getMessageTypes().get(107);
+      getDescriptor().getMessageTypes().get(113);
     internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor,
        new java.lang.String[] { "Region", });
     internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor =
-      getDescriptor().getMessageTypes().get(108);
+      getDescriptor().getMessageTypes().get(114);
     internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor,
        new java.lang.String[] { "CompactionTimestamp", });
     internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor =
-      getDescriptor().getMessageTypes().get(109);
+      getDescriptor().getMessageTypes().get(115);
     internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor =
-      getDescriptor().getMessageTypes().get(110);
+      getDescriptor().getMessageTypes().get(116);
     internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor,
        new java.lang.String[] { "Capabilities", });
     internal_static_hbase_pb_ListDrainingRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(111);
+      getDescriptor().getMessageTypes().get(117);
     internal_static_hbase_pb_ListDrainingRegionServersRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ListDrainingRegionServersRequest_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_ListDrainingRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(112);
+      getDescriptor().getMessageTypes().get(118);
     internal_static_hbase_pb_ListDrainingRegionServersResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_ListDrainingRegionServersResponse_descriptor,
        new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_DrainRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(113);
+      getDescriptor().getMessageTypes().get(119);
     internal_static_hbase_pb_DrainRegionServersRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_DrainRegionServersRequest_descriptor,
        new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_DrainRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(114);
+      getDescriptor().getMessageTypes().get(120);
     internal_static_hbase_pb_DrainRegionServersResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_DrainRegionServersResponse_descriptor,
        new java.lang.String[] { });
     internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(115);
+      getDescriptor().getMessageTypes().get(121);
     internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_descriptor,
        new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(116);
+      getDescriptor().getMessageTypes().get(122);
     internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_descriptor,
diff --git a/hbase-protocol-shaded/src/main/protobuf/Master.proto b/hbase-protocol-shaded/src/main/protobuf/Master.proto
index e62f52c..cf958ed 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Master.proto
@@ -356,6 +356,35 @@ message IsCatalogJanitorEnabledResponse {
   required bool value = 1;
 }
 
+message RunCleanerChoreRequest {
+}
+
+message RunCleanerChoreResponse {
+  required bool cleaner_chore_ran = 1;
+}
+
+message SetCleanerChoreRunningRequest {
+  required bool on = 1;
+}
+
+message SetCleanerChoreRunningResponse {
+  optional bool prev_value = 1;
+}
+
+message GetCleanerChoreStateRequest {
+}
+
+message GetCleanerChoreStateResponse {
+  enum CleanerChoreState {
+    HFILE_CLEANER_ENABLED = 0;
+    LOGFILE_CLEANER_ENABLED = 1;
+    BOTH_HFILE_LOGFILE_CLEANER_ENABLED = 2;
+    BOTH_HFILE_LOGFILE_CLEANER_DISABLED = 3;
+  }
+
+  required CleanerChoreState cleaner_chore_state = 1;
+}
+
 message SnapshotRequest {
   required SnapshotDescription snapshot = 1;
 }
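The GetCleanerChoreStateResponse enum above packs the two independent cleaner switches into one of four combined states. As a quick orientation, not part of the patch itself, a caller holding the generated response could decode it along these lines, assuming only the protoc-generated classes this hunk produces:

    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse;

    public class CleanerChoreStateDecodeSketch {
      // Maps the wire enum to a human-readable description.
      public static String describe(GetCleanerChoreStateResponse resp) {
        switch (resp.getCleanerChoreState()) {
          case HFILE_CLEANER_ENABLED:
            return "only the HFile cleaner is enabled";
          case LOGFILE_CLEANER_ENABLED:
            return "only the log (WAL) cleaner is enabled";
          case BOTH_HFILE_LOGFILE_CLEANER_ENABLED:
            return "both cleaners are enabled";
          case BOTH_HFILE_LOGFILE_CLEANER_DISABLED:
          default:
            return "both cleaners are disabled";
        }
      }
    }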
@@ -742,6 +771,22 @@ service MasterService {
   rpc IsCatalogJanitorEnabled(IsCatalogJanitorEnabledRequest)
     returns(IsCatalogJanitorEnabledResponse);
 
+  /** Ask for a run of the CleanerChore */
+  rpc RunCleanerChore(RunCleanerChoreRequest)
+    returns(RunCleanerChoreResponse);
+
+  /**
+   * Turn the CleanerChore on or off.
+   */
+  rpc SetCleanerChoreRunning(SetCleanerChoreRunningRequest)
+    returns(SetCleanerChoreRunningResponse);
+
+  /**
+   * Query the state of the CleanerChore.
+   */
+  rpc GetCleanerChoreState(GetCleanerChoreStateRequest)
+    returns(GetCleanerChoreStateResponse);
+
   /**
    * Call a master coprocessor endpoint
    */
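End to end, these three RPCs surface through the Admin interface additions earlier in this patch. A minimal client-side sketch, using the standard HBase client bootstrap and the method names as declared on Admin:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.CleanerChoreState;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class CleanerChoreClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          boolean prev = admin.setCleanerChoreRunning(true); // returns the previous switch state
          boolean ran = admin.runCleanerChore();             // true only if both cleaners ran cleanly
          CleanerChoreState state = admin.getCleanerChoreState();
          System.out.println("prev=" + prev + ", ran=" + ran + ", state=" + state);
        }
      }
    }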
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 04c9b43..7582e6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -75,6 +75,7 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.CleanerChoreState;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.TableState;
@@ -96,9 +97,9 @@ import org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
 import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.master.cleaner.LogCleaner;
 import org.apache.hadoop.hbase.master.cleaner.ReplicationMetaCleaner;
-import org.apache.hadoop.hbase.master.locking.LockManager;
 import org.apache.hadoop.hbase.master.cleaner.ReplicationZKNodeCleaner;
 import org.apache.hadoop.hbase.master.cleaner.ReplicationZKNodeCleanerChore;
+import org.apache.hadoop.hbase.master.locking.LockManager;
 import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
 import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
 import org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
@@ -112,9 +113,9 @@ import org.apache.hadoop.hbase.master.procedure.DisableTableProcedure;
 import org.apache.hadoop.hbase.master.procedure.EnableTableProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
 import org.apache.hadoop.hbase.master.procedure.MergeTableRegionsProcedure;
 import org.apache.hadoop.hbase.master.procedure.ModifyColumnFamilyProcedure;
-import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
 import org.apache.hadoop.hbase.master.procedure.ModifyTableProcedure;
 import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
 import org.apache.hadoop.hbase.master.procedure.SplitTableRegionProcedure;
@@ -948,6 +949,30 @@ public class HMaster extends HRegionServer implements MasterServices {
       catalogJanitorChore.getEnabled() : false;
   }
 
+  public CleanerChoreState getCleanerChoreState() {
+    if (hfileCleaner != null && logCleaner != null) {
+      if (hfileCleaner.getEnabled() && logCleaner.getEnabled()) {
+        return CleanerChoreState.BOTH_HFILE_LOG_CLEANER_ENABLED;
+      } else if (hfileCleaner.getEnabled() && !logCleaner.getEnabled()) {
+        return CleanerChoreState.HFILE_CLEANER_ENABLED;
+      } else if (!hfileCleaner.getEnabled() && logCleaner.getEnabled()) {
+        return CleanerChoreState.LOG_CLEANER_ENABLED;
+      } else {
+        return CleanerChoreState.BOTH_HFILE_LOG_CLEANER_DISABLED;
+      }
+    }
+
+    if (hfileCleaner != null && hfileCleaner.getEnabled()) {
+      return CleanerChoreState.HFILE_CLEANER_ENABLED;
+    }
+
+    if (logCleaner != null && logCleaner.getEnabled()) {
+      return CleanerChoreState.LOG_CLEANER_ENABLED;
+    }
+
+    return CleanerChoreState.BOTH_HFILE_LOG_CLEANER_DISABLED;
+  }
+
   @Override
   public TableDescriptors getTableDescriptors() {
     return this.tableDescriptors;
@@ -2654,6 +2679,10 @@ public class HMaster extends HRegionServer implements MasterServices {
     return this.hfileCleaner;
   }
 
+  public LogCleaner getLogCleaner() {
+    return this.logCleaner;
+  }
+
   /**
    * @return the underlying snapshot manager
    */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 60b8b65..96d016f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -90,6 +90,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockR
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.*;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCleanerChoreStateResponse.CleanerChoreState;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;
@@ -702,6 +703,16 @@ public class MasterRpcServices extends RSRpcServices
   }
 
   @Override
+  public GetCleanerChoreStateResponse getCleanerChoreState(RpcController c,
+      GetCleanerChoreStateRequest req)
+    throws ServiceException {
+    int value = master.getCleanerChoreState().getValue();
+    return GetCleanerChoreStateResponse.newBuilder()
+        .setCleanerChoreState(CleanerChoreState.forNumber(value))
+        .build();
+  }
+
+  @Override
   public GetClusterStatusResponse getClusterStatus(RpcController controller,
       GetClusterStatusRequest req) throws ServiceException {
     GetClusterStatusResponse.Builder response = GetClusterStatusResponse.newBuilder();
@@ -1203,6 +1214,19 @@ public class MasterRpcServices extends RSRpcServices
   }
 
   @Override
+  public RunCleanerChoreResponse runCleanerChore(RpcController c, RunCleanerChoreRequest req)
+    throws ServiceException {
+    try {
+      master.checkInitialized();
+      boolean hfilesCleaned = master.getHFileCleaner().runCleaner();
+      boolean logsCleaned = master.getLogCleaner().runCleaner();
+      return ResponseConverter.buildRunCleanerChoreResponse(hfilesCleaned && logsCleaned);
+    } catch (IOException ioe) {
+      throw new ServiceException(ioe);
+    }
+  }
+
+  @Override
   public SetBalancerRunningResponse setBalancerRunning(RpcController c,
       SetBalancerRunningRequest req) throws ServiceException {
     try {
@@ -1216,6 +1240,22 @@ public class MasterRpcServices extends RSRpcServices
   }
 
   @Override
+  public SetCleanerChoreRunningResponse setCleanerChoreRunning(RpcController c,
+      SetCleanerChoreRunningRequest req)
+    throws ServiceException {
+    try {
+      master.checkInitialized();
+    } catch (IOException ioe) {
+      throw new ServiceException(ioe);
+    }
+    boolean prevValue =
+        master.getLogCleaner().getEnabled() && master.getHFileCleaner().getEnabled();
+    master.getLogCleaner().setEnabled(req.getOn());
+    master.getHFileCleaner().setEnabled(req.getOn());
+    return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();
+  }
+
+  @Override
   public ShutdownResponse shutdown(RpcController controller,
       ShutdownRequest request) throws ServiceException {
     LOG.info(master.getClientIdAuditPrefix() + " shutdown");
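Note that runCleanerChore evaluates both cleaners before combining the results, so a failed HFile-cleaner pass does not prevent the log cleaner from running. The ResponseConverter.buildRunCleanerChoreResponse helper referenced here is defined elsewhere in this patch; a plausible shape, assuming it only wraps the boolean into the proto message, would be:

    // Hypothetical sketch of the helper; the authoritative version lives in the
    // ResponseConverter changes listed in this patch's diffstat.
    public static RunCleanerChoreResponse buildRunCleanerChoreResponse(boolean ran) {
      return RunCleanerChoreResponse.newBuilder().setCleanerChoreRan(ran).build();
    }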
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index b094507..c6b6f62 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -36,6 +36,7 @@ import java.io.IOException;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
  * Abstract Cleaner that uses a chain of delegates to clean a directory of files
@@ -50,6 +51,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
   private final Configuration conf;
   protected List<T> cleanersChain;
   protected Map<String, Object> params;
+  private AtomicBoolean enabled = new AtomicBoolean(true);
 
   public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
       FileSystem fs, Path oldFileDir, String confKey) {
@@ -128,6 +130,14 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
 
   @Override
   protected void chore() {
+    if (getEnabled()) {
+      runCleaner();
+    } else {
+      LOG.debug("Cleaner chore disabled! Not cleaning.");
+    }
+  }
+
+  public boolean runCleaner() {
     try {
       FileStatus[] files = FSUtils.listStatus(this.fs, this.oldFileDir);
       checkAndDeleteEntries(files);
@@ -135,7 +145,9 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
       e = e instanceof RemoteException ?
           ((RemoteException)e).unwrapRemoteException() : e;
       LOG.warn("Error while cleaning the logs", e);
+      return false;
     }
+    return true;
   }
 
   /**
@@ -291,4 +303,17 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
       }
     }
   }
+
+  /**
+   * Enable or disable the cleaner chore.
+   * @param enabled the new state to set
+   * @return the previous state
+   */
+  public boolean setEnabled(final boolean enabled) {
+    return this.enabled.getAndSet(enabled);
+  }
+
+  public boolean getEnabled() {
+    return this.enabled.get();
+  }
 }
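The enabled flag is an AtomicBoolean so that concurrent RPC threads see a consistent switch and so that setEnabled can swap the flag and report the previous value in a single atomic step; that previous value is what SetCleanerChoreRunningResponse.prev_value ultimately carries back. A standalone illustration of the getAndSet semantics, not patch code:

    import java.util.concurrent.atomic.AtomicBoolean;

    public class EnableGateDemo {
      public static void main(String[] args) {
        AtomicBoolean enabled = new AtomicBoolean(true); // the chore starts enabled
        boolean previous = enabled.getAndSet(false);     // atomically disable, get the old value
        System.out.println("previous=" + previous + ", now=" + enabled.get()); // previous=true, now=false
      }
    }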
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 92c7bb6..5c76643 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -289,6 +289,72 @@ public class TestCleanerChore {
     Mockito.verify(spy, Mockito.times(1)).isFileDeletable(Mockito.any(FileStatus.class));
   }
 
+  @Test
+  public void testDeleteFileWithCleanerEnabled() throws Exception {
+    Stoppable stop = new StoppableImplementation();
+    Configuration conf = UTIL.getConfiguration();
+    Path testDir = UTIL.getDataTestDir();
+    FileSystem fs = UTIL.getTestFileSystem();
+    String confKey = "hbase.test.cleaner.delegates";
+    conf.set(confKey, AlwaysDelete.class.getName());
+
+    AllValidPaths chore = new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey);
+
+    // Enable the cleaner
+    chore.setEnabled(true);
+
+    // create the directory layout in the directory to clean
+    Path parent = new Path(testDir, "parent");
+    Path child = new Path(parent, "child");
+    Path file = new Path(child, "someFile");
+    fs.mkdirs(child);
+
+    // touch a new file
+    fs.create(file).close();
+    assertTrue("Test file didn't get created.", fs.exists(file));
+
+    // run the chore
+    chore.chore();
+
+    // verify all the files got deleted
+    assertFalse("File didn't get deleted", fs.exists(file));
+    assertFalse("Empty directory didn't get deleted", fs.exists(child));
+    assertFalse("Empty directory didn't get deleted", fs.exists(parent));
+  }
+
+  @Test
+  public void testDeleteFileWithCleanerDisabled() throws Exception {
+    Stoppable stop = new StoppableImplementation();
+    Configuration conf = UTIL.getConfiguration();
+    Path testDir = UTIL.getDataTestDir();
+    FileSystem fs = UTIL.getTestFileSystem();
+    String confKey = "hbase.test.cleaner.delegates";
+    conf.set(confKey, AlwaysDelete.class.getName());
+
+    AllValidPaths chore = new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey);
+
+    // Disable the cleaner
+    chore.setEnabled(false);
+
+    // create the directory layout in the directory to clean
+    Path parent = new Path(testDir, "parent");
+    Path child = new Path(parent, "child");
+    Path file = new Path(child, "someFile");
+    fs.mkdirs(child);
+
+    // touch a new file
+    fs.create(file).close();
+    assertTrue("Test file didn't get created.", fs.exists(file));
+
+    // run the chore
+    chore.chore();
+
+    // verify all the files still exist
+    assertTrue("File got deleted with cleaner disabled", fs.exists(file));
+    assertTrue("Directory got deleted", fs.exists(child));
+    assertTrue("Directory got deleted", fs.exists(parent));
+  }
+
   private static class AllValidPaths extends CleanerChore<BaseHFileCleanerDelegate> {
 
     public AllValidPaths(String name, Stoppable s, Configuration conf, FileSystem fs,
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 0718627..37e0488 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -243,6 +243,26 @@ module Hbase
     end
 
     #----------------------------------------------------------------------------------------------
+    # Request a cleaner chore run (for garbage collection of HFiles and WAL files)
+    def cleaner_chore_run()
+      @admin.runCleanerChore()
+    end
+
+    #----------------------------------------------------------------------------------------------
+    # Enable/disable the cleaner chore
+    # Returns the previous cleaner switch setting.
+    def cleaner_chore_switch(enableDisable)
+      @admin.setCleanerChoreRunning(java.lang.Boolean::valueOf(enableDisable))
+    end
+
+    #----------------------------------------------------------------------------------------------
+    # Query the cleaner chore state
+    # Returns the cleaner state
+    def cleaner_chore_state()
+      @admin.getCleanerChoreState()
+    end
+
+    #----------------------------------------------------------------------------------------------
     # Enables a table
     def enable(table_name)
       tableExists(table_name)
diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb
index 2c9ab72..6597f65 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -348,6 +348,9 @@ Shell.load_command_group(
     catalogjanitor_run
     catalogjanitor_switch
     catalogjanitor_enabled
+    cleaner_chore_run
+    cleaner_chore_switch
+    cleaner_chore_state
     compact_rs
     compaction_state
     trace
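The three commands registered above map one-to-one onto the admin.rb helpers; a typical interactive session (the return values shown are illustrative) might look like:

  hbase> cleaner_chore_switch false
  true
  hbase> cleaner_chore_state
  BOTH_HFILE_LOG_CLEANER_DISABLED
  hbase> cleaner_chore_switch true
  false
  hbase> cleaner_chore_run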
diff --git a/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_run.rb b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_run.rb
new file mode 100644
index 0000000..9ade9c9
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_run.rb
@@ -0,0 +1,35 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+  module Commands
+    class CleanerChoreRun < Command
+      def help
+        return <<-EOF
+Cleaner chore command for garbage collection of HFiles and WAL files.
+
+  hbase> cleaner_chore_run
+
+EOF
+      end
+      def command()
+        admin.cleaner_chore_run()
+      end
+    end
+  end
+end
\ No newline at end of file
diff --git a/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_state.rb b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_state.rb
new file mode 100644
index 0000000..e1b6479
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_state.rb
@@ -0,0 +1,36 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+  module Commands
+    class CleanerChoreState < Command
+      def help
+        return <<-EOF
+Query for the Cleaner chore state.
+Examples:
+
+  hbase> cleaner_chore_state
+  EOF
+      end
+
+      def command()
+        puts admin.cleaner_chore_state()
+      end
+    end
+  end
+end
\ No newline at end of file
diff --git a/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_switch.rb b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_switch.rb
new file mode 100644
index 0000000..20241ec
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/cleaner_chore_switch.rb
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+  module Commands
+    class CleanerChoreSwitch < Command
+      def help
+        return <<-EOF
+Enable/Disable Cleaner chore. Returns previous Cleaner chore state.
+Examples:
+
+  hbase> cleaner_chore_switch true
+  hbase> cleaner_chore_switch false
+EOF
+      end
+
+      def command(enableDisable)
+        formatter.row([admin.cleaner_chore_switch(enableDisable)? "true" : "false"])
+      end
+    end
+  end
+end
\ No newline at end of file
-- 
2.10.1 (Apple Git-78)