From 59866628760661bce95896bf0973de21742d62f8 Mon Sep 17 00:00:00 2001
From: Balazs Meszaros
Date: Mon, 13 Feb 2017 13:50:56 -0800
Subject: [PATCH] HBASE-15143 Procedure v2 - Web UI displaying queues

---
 .../java/org/apache/hadoop/hbase/client/Admin.java |    9 +
 .../hbase/client/ConnectionImplementation.java     |   11 +-
 .../org/apache/hadoop/hbase/client/HBaseAdmin.java |   27 +
 .../hbase/client/ShortCircuitMasterConnection.java |    6 +
 hbase-common/pom.xml                               |    4 +
 .../java/org/apache/hadoop/hbase/LockInfo.java     |  113 +
 .../java/org/apache/hadoop/hbase/LockUtil.java     |  109 +
 .../java/org/apache/hadoop/hbase/TestLockUtil.java |   96 +
 .../hbase/procedure2/ProcedureScheduler.java       |    8 +
 .../hbase/procedure2/SimpleProcedureScheduler.java |   12 +-
 .../shaded/protobuf/generated/LockProtos.java      | 2071 ++++++++++++++++
 .../shaded/protobuf/generated/MasterProtos.java    | 2464 +++++++++++++++-----
 hbase-protocol-shaded/src/main/protobuf/Lock.proto |   47 +
 .../src/main/protobuf/Master.proto                 |   11 +
 .../hbase/tmpl/master/MasterStatusTmpl.jamon       |    2 +-
 .../hadoop/hbase/coprocessor/MasterObserver.java   |   19 +
 .../org/apache/hadoop/hbase/master/HMaster.java    |   29 +-
 .../hadoop/hbase/master/MasterCoprocessorHost.java |   21 +
 .../hadoop/hbase/master/MasterRpcServices.java     |   23 +-
 .../apache/hadoop/hbase/master/MasterServices.java |    9 +-
 .../hadoop/hbase/master/locking/LockProcedure.java |    8 +-
 .../master/procedure/MasterProcedureScheduler.java |  115 +-
 .../resources/hbase-webapps/master/procedures.jsp  |  125 +-
 .../resources/hbase-webapps/master/snapshot.jsp    |    2 +-
 .../hbase-webapps/master/snapshotsStats.jsp        |    2 +-
 .../main/resources/hbase-webapps/master/table.jsp  |    2 +-
 .../hbase-webapps/master/tablesDetailed.jsp        |    2 +-
 .../src/main/resources/hbase-webapps/master/zk.jsp |    2 +-
 .../hbase/coprocessor/TestMasterObserver.java      |   15 +
 .../hbase/master/MockNoopMasterServices.java       |    9 +-
 .../procedure/TestMasterProcedureScheduler.java    |  169 +-
 hbase-shell/src/main/ruby/hbase/admin.rb           |    5 +
 hbase-shell/src/main/ruby/shell.rb                 |    3 +-
 hbase-shell/src/main/ruby/shell/commands.rb        |    5 +
 .../src/main/ruby/shell/commands/list_locks.rb     |   60 +
 hbase-shell/src/main/ruby/shell/formatter.rb       |    9 +-
 hbase-shell/src/test/ruby/shell/list_locks_test.rb |  161 ++
 37 files changed, 5096 insertions(+), 689 deletions(-)
 create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/LockInfo.java
 create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/LockUtil.java
 create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/TestLockUtil.java
 create mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockProtos.java
 create mode 100644 hbase-protocol-shaded/src/main/protobuf/Lock.proto
 create mode 100644 hbase-shell/src/main/ruby/shell/commands/list_locks.rb
 create mode 100644 hbase-shell/src/test/ruby/shell/list_locks_test.rb

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index cc14acd..fcb2586 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.LockInfo;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
 import org.apache.hadoop.hbase.ProcedureInfo;
@@ -1252,6 +1253,14 @@ public interface Admin extends Abortable, Closeable {
     throws IOException;
 
   /**
+   * List locks.
+   * @return lock list
+   * @throws IOException if a remote or network exception occurs
+   */
+  LockInfo[] listLocks()
+    throws IOException;
+
+  /**
    * Roll the log writer. I.e. for filesystem based write ahead logs, start writing to a new file.
    *
    * Note that the actual rolling of the log writer is asynchronous and may not be complete when
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index adf1496..bea03ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -25,8 +25,6 @@ import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRI
 import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
 import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsentEx;
 
-import com.google.common.annotations.VisibleForTesting;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -120,6 +118,8 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.zookeeper.KeeperException;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import edu.umd.cs.findbugs.annotations.Nullable;
 
 /**
@@ -1283,6 +1283,13 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }
 
   @Override
+  public MasterProtos.ListLocksResponse listLocks(
+      RpcController controller,
+      MasterProtos.ListLocksRequest request) throws ServiceException {
+    return stub.listLocks(controller, request);
+  }
+
+  @Override
   public MasterProtos.AddColumnResponse addColumn(
       RpcController controller,
       MasterProtos.AddColumnRequest request) throws ServiceException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 1f143b5..5080a04 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -55,6 +55,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.LockInfo;
+import org.apache.hadoop.hbase.LockUtil;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -110,6 +112,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringP
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
@@ -151,6 +154,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedur
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
@@ -2132,6 +2137,28 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
+  public LockInfo[] listLocks() throws IOException {
+    return executeCallable(new MasterCallable<LockInfo[]>(getConnection(),
+        getRpcControllerFactory()) {
+      @Override
+      protected LockInfo[] rpcCall() throws Exception {
+        ListLocksRequest request = ListLocksRequest.newBuilder().build();
+        ListLocksResponse response = master.listLocks(getRpcController(), request);
+        List<LockProtos.Lock> locksProto = response.getLockList();
+
+        LockInfo[] locks = new LockInfo[locksProto.size()];
+
+        for (int i = 0; i < locks.length; i++) {
+          LockProtos.Lock lockProto = locksProto.get(i);
+          locks[i] = LockUtil.convertFromProtoLock(lockProto);
+        }
+
+        return locks;
+      }
+    });
+  }
+
+  @Override
   public HTableDescriptor[] listTableDescriptorsByNamespace(final String name) throws IOException {
     return executeCallable(new MasterCallable<HTableDescriptor[]>(getConnection(),
         getRpcControllerFactory()) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
index d70c76f..c26e625 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
@@ -193,6 +193,12 @@
   }
 
   @Override
+  public ListLocksResponse listLocks(RpcController controller,
+      ListLocksRequest request) throws ServiceException {
+    return stub.listLocks(controller, request);
+  }
+
+  @Override
   public ListNamespaceDescriptorsResponse listNamespaceDescriptors(RpcController controller,
       ListNamespaceDescriptorsRequest request) throws ServiceException {
     return stub.listNamespaceDescriptors(controller, request);
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index baabe56..7b3c040 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -220,6 +220,10 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol-shaded</artifactId>
+    </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/LockInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/LockInfo.java
new file mode 100644
index 0000000..d06e67c
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/LockInfo.java
@@ -0,0 +1,113 @@
+package org.apache.hadoop.hbase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class LockInfo {
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public enum ResourceType {
+    SERVER, NAMESPACE, TABLE, REGION
+  }
+
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public enum LockType {
+    EXCLUSIVE, SHARED
+  }
+
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public static class WaitingProcedure {
+    private LockType lockType;
+    private long procId;
+
+    public WaitingProcedure() {
+    }
+
+    public LockType getLockType() {
+      return lockType;
+    }
+
+    public void setLockType(LockType lockType) {
+      this.lockType = lockType;
+    }
+
+    public long getProcId() {
+      return procId;
+    }
+
+    public void setProcId(long procId) {
+      this.procId = procId;
+    }
+  }
+
+  private ResourceType resourceType;
+  private String resourceName;
+  private LockType lockType;
+  private long exclusiveLockOwnerProcId;
+  private int sharedLockCount;
+  private final List<WaitingProcedure> waitingProcedures;
+
+  public LockInfo() {
+    waitingProcedures = new ArrayList<>();
+  }
+
+  public ResourceType getResourceType() {
+    return resourceType;
+  }
+
+  public void setResourceType(ResourceType resourceType) {
+    this.resourceType = resourceType;
+  }
+
+  public String getResourceName() {
+    return resourceName;
+  }
+
+  public void setResourceName(String resourceName) {
+    this.resourceName = resourceName;
+  }
+
+  public LockType getLockType() {
+    return lockType;
+  }
+
+  public void setLockType(LockType lockType) {
+    this.lockType = lockType;
+  }
+
+  public long getExclusiveLockOwnerProcId() {
+    return exclusiveLockOwnerProcId;
+  }
+
+  public void setExclusiveLockOwnerProcId(long exclusiveLockOwnerProcId) {
+    this.exclusiveLockOwnerProcId = exclusiveLockOwnerProcId;
+  }
+
+  public int getSharedLockCount() {
+    return sharedLockCount;
+  }
+
+  public void setSharedLockCount(int sharedLockCount) {
+    this.sharedLockCount = sharedLockCount;
+  }
+
+  public List<WaitingProcedure> getWaitingProcedures() {
+    return waitingProcedures;
+  }
+
+  public void setWaitingProcedures(List<WaitingProcedure> waitingProcedures) {
+    this.waitingProcedures.clear();
+    this.waitingProcedures.addAll(waitingProcedures);
+  }
+
+  public void addWaitingProcedure(WaitingProcedure waitingProcedure) {
+    waitingProcedures.add(waitingProcedure);
+  }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/LockUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/LockUtil.java
new file mode 100644
index 0000000..e6a320b
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/LockUtil.java
@@ -0,0 +1,109 @@
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
+
+@InterfaceAudience.Private
+public class LockUtil {
+  private LockUtil() {
+  }
+
+  public static LockProtos.ResourceType convertToProtoResourceType(
+      LockInfo.ResourceType resourceType) {
+    switch (resourceType) {
+    case SERVER:
+      return LockProtos.ResourceType.RESOURCE_TYPE_SERVER;
+    case NAMESPACE:
+      return LockProtos.ResourceType.RESOURCE_TYPE_NAMESPACE;
+    case TABLE:
+      return LockProtos.ResourceType.RESOURCE_TYPE_TABLE;
+    case REGION:
+      return LockProtos.ResourceType.RESOURCE_TYPE_REGION;
+    default:
+      throw new IllegalArgumentException("Unknown resource type: " + resourceType);
+    }
+  }
+
+  public static LockInfo.ResourceType convertFromProtoResourceType(
+      LockProtos.ResourceType resourceTypeProto) {
+    switch (resourceTypeProto) {
+    case RESOURCE_TYPE_SERVER:
+      return LockInfo.ResourceType.SERVER;
+    case RESOURCE_TYPE_NAMESPACE:
+      return LockInfo.ResourceType.NAMESPACE;
+    case RESOURCE_TYPE_TABLE:
+      return LockInfo.ResourceType.TABLE;
+    case RESOURCE_TYPE_REGION:
+      return LockInfo.ResourceType.REGION;
+    default:
+      throw new IllegalArgumentException("Unknown resource type: " + resourceTypeProto);
+    }
+  }
+
+  public static LockServiceProtos.LockType convertToProtoLockType(
+      LockInfo.LockType lockType) {
+    return LockServiceProtos.LockType.valueOf(lockType.name());
+  }
+
+  public static LockInfo.LockType convertFromProtoLockType(
+      LockServiceProtos.LockType lockTypeProto) {
+    return LockInfo.LockType.valueOf(lockTypeProto.name());
+  }
+
+  public static LockProtos.WaitingProcedure convertToProtoWaitingProcedure(
+      LockInfo.WaitingProcedure waitingProcedure) {
+    LockProtos.WaitingProcedure.Builder builder = LockProtos.WaitingProcedure.newBuilder();
+
+    builder
+      .setLockType(convertToProtoLockType(waitingProcedure.getLockType()))
+      .setProcId(waitingProcedure.getProcId());
+
+    return builder.build();
+  }
+
+  public static LockInfo.WaitingProcedure convertFromProtoWaitingProcedure(
+      LockProtos.WaitingProcedure waitingProcedureProto) {
+    LockInfo.WaitingProcedure waiting = new LockInfo.WaitingProcedure();
+
+    waiting.setLockType(convertFromProtoLockType(waitingProcedureProto.getLockType()));
+    waiting.setProcId(waitingProcedureProto.getProcId());
+
+    return waiting;
+  }
+
+  public static LockProtos.Lock convertToProtoLock(LockInfo lock)
+  {
+    LockProtos.Lock.Builder builder = LockProtos.Lock.newBuilder();
+
+    builder
+      .setResourceType(convertToProtoResourceType(lock.getResourceType()))
+      .setResourceName(lock.getResourceName())
+      .setLockType(convertToProtoLockType(lock.getLockType()))
+      .setExclusiveLockOwnerProcId(lock.getExclusiveLockOwnerProcId())
+      .setSharedLockCount(lock.getSharedLockCount());
+
+    for (LockInfo.WaitingProcedure waitingProcedure : lock.getWaitingProcedures()) {
+      builder.addWaitingProcedures(convertToProtoWaitingProcedure(waitingProcedure));
+    }
+
+    return builder.build();
+  }
+
+  public static LockInfo convertFromProtoLock(LockProtos.Lock lockProto)
+  {
+    LockInfo lock = new LockInfo();
+
+    lock.setResourceType(convertFromProtoResourceType(lockProto.getResourceType()));
+    lock.setResourceName(lockProto.getResourceName());
+    lock.setLockType(convertFromProtoLockType(lockProto.getLockType()));
+    lock.setExclusiveLockOwnerProcId(lockProto.getExclusiveLockOwnerProcId());
+    lock.setSharedLockCount(lockProto.getSharedLockCount());
+
+    for (LockProtos.WaitingProcedure waitingProcedureProto : lockProto.getWaitingProceduresList()) {
+      lock.addWaitingProcedure(convertFromProtoWaitingProcedure(waitingProcedureProto));
+    }
+
+    return lock;
+  }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestLockUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestLockUtil.java
new file mode 100644
index 0000000..dd5fe75
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestLockUtil.java
@@ -0,0 +1,96 @@
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({SmallTests.class})
+public class TestLockUtil {
+  public TestLockUtil() {
+  }
+
+  @Test
+  public void testServer() {
+    LockInfo lock = new LockInfo();
+    lock.setResourceType(LockInfo.ResourceType.SERVER);
+    lock.setResourceName("server");
+    lock.setLockType(LockInfo.LockType.SHARED);
+    lock.setSharedLockCount(2);
+
+    Lock proto = LockUtil.convertToProtoLock(lock);
+    LockInfo lock2 = LockUtil.convertFromProtoLock(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(lock, lock2));
+  }
+
+  @Test
+  public void testNamespace() {
+    LockInfo lock = new LockInfo();
+    lock.setResourceType(LockInfo.ResourceType.NAMESPACE);
+    lock.setResourceName("ns");
+    lock.setLockType(LockInfo.LockType.EXCLUSIVE);
+    lock.setExclusiveLockOwnerProcId(1);
+
+    Lock proto = LockUtil.convertToProtoLock(lock);
+    LockInfo lock2 = LockUtil.convertFromProtoLock(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(lock, lock2));
+  }
+
+  @Test
+  public void testTable() {
+    LockInfo lock = new LockInfo();
+    lock.setResourceType(LockInfo.ResourceType.TABLE);
+    lock.setResourceName("table");
+    lock.setLockType(LockInfo.LockType.SHARED);
+    lock.setSharedLockCount(2);
+
+    Lock proto = LockUtil.convertToProtoLock(lock);
+    LockInfo lock2 = LockUtil.convertFromProtoLock(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(lock, lock2));
+  }
+
+  @Test
+  public void testRegion() {
+    LockInfo lock = new LockInfo();
+    lock.setResourceType(LockInfo.ResourceType.REGION);
+    lock.setResourceName("region");
+    lock.setLockType(LockInfo.LockType.EXCLUSIVE);
+    lock.setExclusiveLockOwnerProcId(2);
+
+    Lock proto = LockUtil.convertToProtoLock(lock);
+    LockInfo lock2 = LockUtil.convertFromProtoLock(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(lock, lock2));
+  }
+
+  @Test
+  public void testExclusiveWaiting() {
+    LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
+    waitingProcedure.setLockType(LockInfo.LockType.EXCLUSIVE);
+    waitingProcedure.setProcId(1);
+
+    LockProtos.WaitingProcedure proto = LockUtil.convertToProtoWaitingProcedure(waitingProcedure);
+    LockInfo.WaitingProcedure waitingProcedure2 = LockUtil.convertFromProtoWaitingProcedure(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(waitingProcedure, waitingProcedure2));
+  }
+
+  @Test
+  public void testSharedWaiting() {
+    LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
+    waitingProcedure.setLockType(LockInfo.LockType.SHARED);
+    waitingProcedure.setProcId(2);
+
+    LockProtos.WaitingProcedure proto = LockUtil.convertToProtoWaitingProcedure(waitingProcedure);
+    LockInfo.WaitingProcedure waitingProcedure2 = LockUtil.convertFromProtoWaitingProcedure(proto);
+
+    assertTrue(EqualsBuilder.reflectionEquals(waitingProcedure, waitingProcedure2));
+  }
+}
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java
index 16ff781..759417a 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.procedure2;
 
 import com.google.common.annotations.VisibleForTesting;
 
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hbase.LockInfo;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -123,6 +125,12 @@
   boolean waitEvent(ProcedureEvent event, Procedure procedure);
 
   /**
+   * List lock queues.
+   * @return the locks
+   */
+  List<LockInfo> listLocks();
+
+  /**
    * Returns the number of elements in this queue.
    * @return the number of elements in this queue.
    */
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java
index 788f4ff..8c47603 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java
@@ -18,10 +18,15 @@
 
 package org.apache.hadoop.hbase.procedure2;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.LockInfo;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Simple scheduler for procedures
  */
@@ -73,4 +78,9 @@ public class SimpleProcedureScheduler extends AbstractProcedureScheduler {
   @Override
   public void completionCleanup(Procedure proc) {
   }
+
+  @Override
+  public List<LockInfo> listLocks() {
+    return Collections.emptyList();
+  }
 }
\ No newline at end of file
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockProtos.java
new file mode 100644
index 0000000..9b3edc8
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockProtos.java
@@ -0,0 +1,2071 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: Lock.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class LockProtos {
+  private LockProtos() {}
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  /**
+   * Protobuf enum {@code hbase.pb.ResourceType}
+   */
+  public enum ResourceType
+      implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * RESOURCE_TYPE_SERVER = 1;
+     */
+    RESOURCE_TYPE_SERVER(1),
+    /**
+     * RESOURCE_TYPE_NAMESPACE = 2;
+     */
+    RESOURCE_TYPE_NAMESPACE(2),
+    /**
+     * RESOURCE_TYPE_TABLE = 3;
+     */
+    RESOURCE_TYPE_TABLE(3),
+    /**
+     * RESOURCE_TYPE_REGION = 4;
+     */
+    RESOURCE_TYPE_REGION(4),
+    ;
+
+    /**
+     * RESOURCE_TYPE_SERVER = 1;
+     */
+    public static final int RESOURCE_TYPE_SERVER_VALUE = 1;
+    /**
+     * RESOURCE_TYPE_NAMESPACE = 2;
+     */
+    public static final int RESOURCE_TYPE_NAMESPACE_VALUE = 2;
+    /**
+     * RESOURCE_TYPE_TABLE = 3;
+     */
+    public static final int RESOURCE_TYPE_TABLE_VALUE = 3;
+    /**
+     * RESOURCE_TYPE_REGION = 4;
+     */
+    public static final int RESOURCE_TYPE_REGION_VALUE = 4;
+
+
+    public final int getNumber() {
+      return value;
+    }
+
+    /**
+     * @deprecated Use {@link #forNumber(int)} instead.
+ */ + @java.lang.Deprecated + public static ResourceType valueOf(int value) { + return forNumber(value); + } + + public static ResourceType forNumber(int value) { + switch (value) { + case 1: return RESOURCE_TYPE_SERVER; + case 2: return RESOURCE_TYPE_NAMESPACE; + case 3: return RESOURCE_TYPE_TABLE; + case 4: return RESOURCE_TYPE_REGION; + default: return null; + } + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< + ResourceType> internalValueMap = + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap() { + public ResourceType findValueByNumber(int number) { + return ResourceType.forNumber(number); + } + }; + + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final ResourceType[] VALUES = values(); + + public static ResourceType valueOf( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private ResourceType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.ResourceType) + } + + public interface WaitingProcedureOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.WaitingProcedure) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * required .hbase.pb.LockType lock_type = 1; + */ + boolean hasLockType(); + /** + * required .hbase.pb.LockType lock_type = 1; + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType(); + + /** + * required int64 proc_id = 2; + */ + boolean hasProcId(); + /** + * required int64 proc_id = 2; + */ + long getProcId(); + } + /** + * Protobuf type {@code hbase.pb.WaitingProcedure} + */ + public static final class WaitingProcedure extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.WaitingProcedure) + WaitingProcedureOrBuilder { + // Use WaitingProcedure.newBuilder() to construct. 
+ private WaitingProcedure(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WaitingProcedure() { + lockType_ = 1; + procId_ = 0L; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WaitingProcedure( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + lockType_ = rawValue; + } + break; + } + case 16: { + bitField0_ |= 0x00000002; + procId_ = input.readInt64(); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_WaitingProcedure_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder.class); + } + + private int bitField0_; + public static final int LOCK_TYPE_FIELD_NUMBER = 1; + private int lockType_; + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public boolean hasLockType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result; + } + + public static final int PROC_ID_FIELD_NUMBER = 2; + private long procId_; + /** + * required int64 proc_id = 2; + */ + public boolean hasProcId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required int64 proc_id = 2; + */ + public long getProcId() { + return procId_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!hasLockType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasProcId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, lockType_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt64(2, procId_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeEnumSize(1, lockType_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeInt64Size(2, procId_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure) obj; + + boolean result = true; + result = result && (hasLockType() == other.hasLockType()); + if (hasLockType()) { + result = result && lockType_ == other.lockType_; + } + result = result && (hasProcId() == other.hasProcId()); + if (hasProcId()) { + result = result && (getProcId() + == other.getProcId()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLockType()) { + hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER; + hash = (53 * hash) + lockType_; + } + if (hasProcId()) { + hash = (37 * hash) + PROC_ID_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( + getProcId()); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.WaitingProcedure} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.WaitingProcedure) + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_WaitingProcedure_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + lockType_ = 1; + bitField0_ = (bitField0_ & ~0x00000001); + procId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_WaitingProcedure_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lockType_ = lockType_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.procId_ = procId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return 
(Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.getDefaultInstance()) return this; + if (other.hasLockType()) { + setLockType(other.getLockType()); + } + if (other.hasProcId()) { + setProcId(other.getProcId()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasLockType()) { + return false; + } + if (!hasProcId()) { + return false; + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int lockType_ = 1; + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public boolean hasLockType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result; + } + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + lockType_ = value.getNumber(); + onChanged(); + return this; + } + /** + * required .hbase.pb.LockType lock_type = 1; + */ + public Builder clearLockType() { + bitField0_ = (bitField0_ & ~0x00000001); + lockType_ = 1; + onChanged(); + return this; + } + + private long procId_ ; + /** + * required int64 proc_id = 2; + */ + public boolean hasProcId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required int64 proc_id = 2; + */ + public long getProcId() { + return procId_; + } + /** + * required int64 proc_id = 2; + */ + public Builder setProcId(long value) { + bitField0_ |= 0x00000002; + procId_ = value; + onChanged(); + return this; + } + /** + * required int64 proc_id = 2; + */ + public Builder clearProcId() { + bitField0_ = (bitField0_ & ~0x00000002); + procId_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.WaitingProcedure) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.WaitingProcedure) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public WaitingProcedure parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new WaitingProcedure(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface LockOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.Lock) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + boolean hasResourceType(); + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType getResourceType(); + + /** + * optional string 
resource_name = 2; + */ + boolean hasResourceName(); + /** + * optional string resource_name = 2; + */ + java.lang.String getResourceName(); + /** + * optional string resource_name = 2; + */ + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getResourceNameBytes(); + + /** + * required .hbase.pb.LockType lock_type = 3; + */ + boolean hasLockType(); + /** + * required .hbase.pb.LockType lock_type = 3; + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType(); + + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + boolean hasExclusiveLockOwnerProcId(); + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + long getExclusiveLockOwnerProcId(); + + /** + * optional int32 shared_lock_count = 5; + */ + boolean hasSharedLockCount(); + /** + * optional int32 shared_lock_count = 5; + */ + int getSharedLockCount(); + + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + java.util.List + getWaitingProceduresList(); + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getWaitingProcedures(int index); + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + int getWaitingProceduresCount(); + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + java.util.List + getWaitingProceduresOrBuilderList(); + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder( + int index); + } + /** + * Protobuf type {@code hbase.pb.Lock} + */ + public static final class Lock extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.Lock) + LockOrBuilder { + // Use Lock.newBuilder() to construct. 
+ private Lock(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Lock() { + resourceType_ = 1; + resourceName_ = ""; + lockType_ = 1; + exclusiveLockOwnerProcId_ = 0L; + sharedLockCount_ = 0; + waitingProcedures_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Lock( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + resourceType_ = rawValue; + } + break; + } + case 18: { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000002; + resourceName_ = bs; + break; + } + case 24: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + bitField0_ |= 0x00000004; + lockType_ = rawValue; + } + break; + } + case 32: { + bitField0_ |= 0x00000008; + exclusiveLockOwnerProcId_ = input.readInt64(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + sharedLockCount_ = input.readInt32(); + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + waitingProcedures_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + waitingProcedures_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.PARSER, extensionRegistry)); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_Lock_descriptor; + } + + protected 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_Lock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder.class); + } + + private int bitField0_; + public static final int RESOURCE_TYPE_FIELD_NUMBER = 1; + private int resourceType_; + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public boolean hasResourceType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType getResourceType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType.valueOf(resourceType_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType.RESOURCE_TYPE_SERVER : result; + } + + public static final int RESOURCE_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object resourceName_; + /** + * optional string resource_name = 2; + */ + public boolean hasResourceName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string resource_name = 2; + */ + public java.lang.String getResourceName() { + java.lang.Object ref = resourceName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + resourceName_ = s; + } + return s; + } + } + /** + * optional string resource_name = 2; + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getResourceNameBytes() { + java.lang.Object ref = resourceName_; + if (ref instanceof java.lang.String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + resourceName_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + + public static final int LOCK_TYPE_FIELD_NUMBER = 3; + private int lockType_; + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public boolean hasLockType() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result; + } + + public static final int EXCLUSIVE_LOCK_OWNER_PROC_ID_FIELD_NUMBER = 4; + private long exclusiveLockOwnerProcId_; + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public boolean hasExclusiveLockOwnerProcId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public long getExclusiveLockOwnerProcId() { + return exclusiveLockOwnerProcId_; + } + + public static final int SHARED_LOCK_COUNT_FIELD_NUMBER = 5; + private int sharedLockCount_; + /** + * optional int32 shared_lock_count = 5; + */ + public boolean hasSharedLockCount() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 shared_lock_count = 5; + */ + public int getSharedLockCount() { + return sharedLockCount_; + } + + public static final int WAITINGPROCEDURES_FIELD_NUMBER = 6; + private java.util.List waitingProcedures_; + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public java.util.List getWaitingProceduresList() { + return waitingProcedures_; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public java.util.List + getWaitingProceduresOrBuilderList() { + return waitingProcedures_; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public int getWaitingProceduresCount() { + return waitingProcedures_.size(); + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getWaitingProcedures(int index) { + return waitingProcedures_.get(index); + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder( + int index) { + return waitingProcedures_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (!hasResourceType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLockType()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getWaitingProceduresCount(); i++) { + if (!getWaitingProcedures(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, resourceType_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeEnum(3, lockType_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeInt64(4, exclusiveLockOwnerProcId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeInt32(5, sharedLockCount_); + } + for (int i = 0; i < waitingProcedures_.size(); i++) { + output.writeMessage(6, waitingProcedures_.get(i)); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeEnumSize(1, resourceType_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeEnumSize(3, lockType_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeInt64Size(4, exclusiveLockOwnerProcId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeInt32Size(5, sharedLockCount_); + } + for (int i = 0; i < waitingProcedures_.size(); i++) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(6, waitingProcedures_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock other = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock) obj; + + boolean result = true; + result = result && (hasResourceType() == other.hasResourceType()); + if (hasResourceType()) { + result = result && resourceType_ == other.resourceType_; + } + result = result && (hasResourceName() == other.hasResourceName()); + if (hasResourceName()) { + result = result && getResourceName() + .equals(other.getResourceName()); + } + result = result && (hasLockType() == other.hasLockType()); + if (hasLockType()) { + result = result && lockType_ == other.lockType_; + } + result = result && (hasExclusiveLockOwnerProcId() == other.hasExclusiveLockOwnerProcId()); + if (hasExclusiveLockOwnerProcId()) { + result = result && (getExclusiveLockOwnerProcId() + == other.getExclusiveLockOwnerProcId()); + } + result = result && (hasSharedLockCount() == other.hasSharedLockCount()); + if (hasSharedLockCount()) { + result = result && (getSharedLockCount() + == other.getSharedLockCount()); + } + result = result && getWaitingProceduresList() + .equals(other.getWaitingProceduresList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResourceType()) { + hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + resourceType_; + } + if (hasResourceName()) { + hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getResourceName().hashCode(); + } + if (hasLockType()) { + hash = (37 * hash) + LOCK_TYPE_FIELD_NUMBER; + hash = (53 * hash) + lockType_; + } + if (hasExclusiveLockOwnerProcId()) { + hash = (37 * hash) + EXCLUSIVE_LOCK_OWNER_PROC_ID_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( + getExclusiveLockOwnerProcId()); + } + if (hasSharedLockCount()) { + hash = (37 * hash) + SHARED_LOCK_COUNT_FIELD_NUMBER; + hash = (53 * hash) + 
getSharedLockCount(); + } + if (getWaitingProceduresCount() > 0) { + hash = (37 * hash) + WAITINGPROCEDURES_FIELD_NUMBER; + hash = (53 * hash) + getWaitingProceduresList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.Lock} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.Lock) + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_Lock_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_Lock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.class, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getWaitingProceduresFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + resourceType_ = 1; + bitField0_ = (bitField0_ & ~0x00000001); + resourceName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + lockType_ = 1; + bitField0_ = (bitField0_ & ~0x00000004); + exclusiveLockOwnerProcId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + sharedLockCount_ = 0; + bitField0_ = (bitField0_ & ~0x00000010); + if (waitingProceduresBuilder_ == null) { + waitingProcedures_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + waitingProceduresBuilder_.clear(); + } + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.internal_static_hbase_pb_Lock_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock result 
= buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock result = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.resourceType_ = resourceType_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.resourceName_ = resourceName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.lockType_ = lockType_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.exclusiveLockOwnerProcId_ = exclusiveLockOwnerProcId_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.sharedLockCount_ = sharedLockCount_; + if (waitingProceduresBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + waitingProcedures_ = java.util.Collections.unmodifiableList(waitingProcedures_); + bitField0_ = (bitField0_ & ~0x00000020); + } + result.waitingProcedures_ = waitingProcedures_; + } else { + result.waitingProcedures_ = waitingProceduresBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.getDefaultInstance()) return this; + if (other.hasResourceType()) { + setResourceType(other.getResourceType()); + } + if (other.hasResourceName()) { + bitField0_ |= 0x00000002; + resourceName_ = other.resourceName_; + onChanged(); + } + if (other.hasLockType()) { + setLockType(other.getLockType()); + } + if (other.hasExclusiveLockOwnerProcId()) { + setExclusiveLockOwnerProcId(other.getExclusiveLockOwnerProcId()); + } + if (other.hasSharedLockCount()) { + 
setSharedLockCount(other.getSharedLockCount()); + } + if (waitingProceduresBuilder_ == null) { + if (!other.waitingProcedures_.isEmpty()) { + if (waitingProcedures_.isEmpty()) { + waitingProcedures_ = other.waitingProcedures_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureWaitingProceduresIsMutable(); + waitingProcedures_.addAll(other.waitingProcedures_); + } + onChanged(); + } + } else { + if (!other.waitingProcedures_.isEmpty()) { + if (waitingProceduresBuilder_.isEmpty()) { + waitingProceduresBuilder_.dispose(); + waitingProceduresBuilder_ = null; + waitingProcedures_ = other.waitingProcedures_; + bitField0_ = (bitField0_ & ~0x00000020); + waitingProceduresBuilder_ = + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getWaitingProceduresFieldBuilder() : null; + } else { + waitingProceduresBuilder_.addAllMessages(other.waitingProcedures_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (!hasResourceType()) { + return false; + } + if (!hasLockType()) { + return false; + } + for (int i = 0; i < getWaitingProceduresCount(); i++) { + if (!getWaitingProcedures(i).isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int resourceType_ = 1; + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public boolean hasResourceType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType getResourceType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType.valueOf(resourceType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType.RESOURCE_TYPE_SERVER : result; + } + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public Builder setResourceType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.ResourceType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + resourceType_ = value.getNumber(); + onChanged(); + return this; + } + /** + * required .hbase.pb.ResourceType resource_type = 1; + */ + public Builder clearResourceType() { + bitField0_ = (bitField0_ & ~0x00000001); + resourceType_ = 1; + onChanged(); + return this; + } + + private java.lang.Object resourceName_ = ""; + /** + * optional string resource_name = 2; + */ + public boolean hasResourceName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string resource_name = 2; + */ + public java.lang.String getResourceName() { + java.lang.Object ref = resourceName_; + if (!(ref instanceof java.lang.String)) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + resourceName_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string resource_name = 2; + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getResourceNameBytes() { + java.lang.Object ref = resourceName_; + if (ref instanceof String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + resourceName_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + /** + * optional string resource_name = 2; + */ + public Builder setResourceName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + resourceName_ = value; + onChanged(); + return this; + } + /** + * optional string resource_name = 2; + */ + public Builder clearResourceName() { + bitField0_ = (bitField0_ & ~0x00000002); + resourceName_ = getDefaultInstance().getResourceName(); + onChanged(); + return this; + } + /** + * optional string resource_name = 2; + */ + public Builder setResourceNameBytes( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + resourceName_ = value; + onChanged(); + return this; + } + + private int lockType_ = 1; + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public boolean hasLockType() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType getLockType() { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType result = org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.valueOf(lockType_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType.EXCLUSIVE : result; + } + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public Builder setLockType(org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + lockType_ = value.getNumber(); + onChanged(); + return this; + } + /** + * required .hbase.pb.LockType lock_type = 3; + */ + public Builder clearLockType() { + bitField0_ = (bitField0_ & ~0x00000004); + lockType_ = 1; + onChanged(); + return this; + } + + private long exclusiveLockOwnerProcId_ ; + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public boolean hasExclusiveLockOwnerProcId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public long getExclusiveLockOwnerProcId() { + return exclusiveLockOwnerProcId_; + } + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public Builder setExclusiveLockOwnerProcId(long value) { + bitField0_ |= 0x00000008; + exclusiveLockOwnerProcId_ = value; + onChanged(); + return this; + } + /** + * optional int64 exclusive_lock_owner_proc_id = 4; + */ + public Builder clearExclusiveLockOwnerProcId() { + bitField0_ = (bitField0_ & ~0x00000008); + exclusiveLockOwnerProcId_ = 0L; + onChanged(); + return this; + } + + private int sharedLockCount_ ; + /** + * optional int32 shared_lock_count = 5; + */ + public boolean hasSharedLockCount() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 shared_lock_count = 5; + */ + public int getSharedLockCount() { + return sharedLockCount_; + } + /** + * optional int32 shared_lock_count = 5; + */ + public Builder setSharedLockCount(int value) { + bitField0_ |= 0x00000010; + sharedLockCount_ = value; + onChanged(); + return this; + } + /** + * optional int32 shared_lock_count = 5; + */ + public Builder clearSharedLockCount() { + bitField0_ = (bitField0_ & ~0x00000010); + sharedLockCount_ = 0; + onChanged(); + return this; + } + + private java.util.List waitingProcedures_ = + java.util.Collections.emptyList(); + private void ensureWaitingProceduresIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + waitingProcedures_ = new java.util.ArrayList(waitingProcedures_); + bitField0_ |= 0x00000020; + } + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder> waitingProceduresBuilder_; + + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public java.util.List getWaitingProceduresList() { + if (waitingProceduresBuilder_ == null) { + return java.util.Collections.unmodifiableList(waitingProcedures_); + } else { + return waitingProceduresBuilder_.getMessageList(); + } + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public int getWaitingProceduresCount() { + if (waitingProceduresBuilder_ == null) { + return waitingProcedures_.size(); + } else { + return waitingProceduresBuilder_.getCount(); + } + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure getWaitingProcedures(int index) 
{ + if (waitingProceduresBuilder_ == null) { + return waitingProcedures_.get(index); + } else { + return waitingProceduresBuilder_.getMessage(index); + } + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder setWaitingProcedures( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure value) { + if (waitingProceduresBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWaitingProceduresIsMutable(); + waitingProcedures_.set(index, value); + onChanged(); + } else { + waitingProceduresBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder setWaitingProcedures( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder builderForValue) { + if (waitingProceduresBuilder_ == null) { + ensureWaitingProceduresIsMutable(); + waitingProcedures_.set(index, builderForValue.build()); + onChanged(); + } else { + waitingProceduresBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder addWaitingProcedures(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure value) { + if (waitingProceduresBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWaitingProceduresIsMutable(); + waitingProcedures_.add(value); + onChanged(); + } else { + waitingProceduresBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder addWaitingProcedures( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure value) { + if (waitingProceduresBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWaitingProceduresIsMutable(); + waitingProcedures_.add(index, value); + onChanged(); + } else { + waitingProceduresBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder addWaitingProcedures( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder builderForValue) { + if (waitingProceduresBuilder_ == null) { + ensureWaitingProceduresIsMutable(); + waitingProcedures_.add(builderForValue.build()); + onChanged(); + } else { + waitingProceduresBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder addWaitingProcedures( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder builderForValue) { + if (waitingProceduresBuilder_ == null) { + ensureWaitingProceduresIsMutable(); + waitingProcedures_.add(index, builderForValue.build()); + onChanged(); + } else { + waitingProceduresBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder addAllWaitingProcedures( + java.lang.Iterable values) { + if (waitingProceduresBuilder_ == null) { + ensureWaitingProceduresIsMutable(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, waitingProcedures_); + onChanged(); + } else { + waitingProceduresBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated 
.hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder clearWaitingProcedures() { + if (waitingProceduresBuilder_ == null) { + waitingProcedures_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + waitingProceduresBuilder_.clear(); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public Builder removeWaitingProcedures(int index) { + if (waitingProceduresBuilder_ == null) { + ensureWaitingProceduresIsMutable(); + waitingProcedures_.remove(index); + onChanged(); + } else { + waitingProceduresBuilder_.remove(index); + } + return this; + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder getWaitingProceduresBuilder( + int index) { + return getWaitingProceduresFieldBuilder().getBuilder(index); + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder getWaitingProceduresOrBuilder( + int index) { + if (waitingProceduresBuilder_ == null) { + return waitingProcedures_.get(index); } else { + return waitingProceduresBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public java.util.List + getWaitingProceduresOrBuilderList() { + if (waitingProceduresBuilder_ != null) { + return waitingProceduresBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(waitingProcedures_); + } + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder addWaitingProceduresBuilder() { + return getWaitingProceduresFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.getDefaultInstance()); + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder addWaitingProceduresBuilder( + int index) { + return getWaitingProceduresFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.getDefaultInstance()); + } + /** + * repeated .hbase.pb.WaitingProcedure waitingProcedures = 6; + */ + public java.util.List + getWaitingProceduresBuilderList() { + return getWaitingProceduresFieldBuilder().getBuilderList(); + } + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder> + getWaitingProceduresFieldBuilder() { + if (waitingProceduresBuilder_ == null) { + waitingProceduresBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.WaitingProcedureOrBuilder>( + waitingProcedures_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + waitingProcedures_ = null; + } + return 
waitingProceduresBuilder_;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.Lock)
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.Lock)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Lock>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Lock>() {
+ public Lock parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new Lock(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Lock> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Lock> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
+
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_WaitingProcedure_descriptor;
+ private static final
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable;
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_Lock_descriptor;
+ private static final
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_hbase_pb_Lock_fieldAccessorTable;
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\nLock.proto\022\010hbase.pb\032\021LockService.prot" +
+ "o\"J\n\020WaitingProcedure\022%\n\tlock_type\030\001 \002(\016" +
+ "2\022.hbase.pb.LockType\022\017\n\007proc_id\030\002 \002(\003\"\353\001" +
+ "\n\004Lock\022-\n\rresource_type\030\001 \002(\0162\026.hbase.pb" +
+ ".ResourceType\022\025\n\rresource_name\030\002 \001(\t\022%\n\t" +
+ "lock_type\030\003 \002(\0162\022.hbase.pb.LockType\022$\n\034e" +
+ "xclusive_lock_owner_proc_id\030\004 \001(\003\022\031\n\021sha" +
+ "red_lock_count\030\005 \001(\005\0225\n\021waitingProcedure" +
+ "s\030\006 \003(\0132\032.hbase.pb.WaitingProcedure*x\n\014R" +
+ "esourceType\022\030\n\024RESOURCE_TYPE_SERVER\020\001\022\033\n",
+ "\027RESOURCE_TYPE_NAMESPACE\020\002\022\027\n\023RESOURCE_T" +
"YPE_TABLE\020\003\022\030\n\024RESOURCE_TYPE_REGION\020\004BG\n" + + "1org.apache.hadoop.hbase.shaded.protobuf" + + ".generatedB\nLockProtosH\001\210\001\001\240\001\001" + }; + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor(), + }, assigner); + internal_static_hbase_pb_WaitingProcedure_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_WaitingProcedure_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_WaitingProcedure_descriptor, + new java.lang.String[] { "LockType", "ProcId", }); + internal_static_hbase_pb_Lock_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_Lock_fieldAccessorTable = new + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_hbase_pb_Lock_descriptor, + new java.lang.String[] { "ResourceType", "ResourceName", "LockType", "ExclusiveLockOwnerProcId", "SharedLockCount", "WaitingProcedures", }); + org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java index 52b0ce5..06925e1 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java @@ -42223,7 +42223,7 @@ public final class MasterProtos { * required .hbase.pb.SnapshotDescription snapshot = 1; */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< @@ -61787,39 +61787,1166 @@ public final class MasterProtos { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) { - return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse)other); + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()) return this; + if (procedureBuilder_ == null) { + if (!other.procedure_.isEmpty()) { + if (procedure_.isEmpty()) { + procedure_ = other.procedure_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureProcedureIsMutable(); + procedure_.addAll(other.procedure_); + } + onChanged(); + } + } else { + if (!other.procedure_.isEmpty()) { + if (procedureBuilder_.isEmpty()) { + procedureBuilder_.dispose(); + procedureBuilder_ = null; + procedure_ = other.procedure_; + bitField0_ = (bitField0_ & ~0x00000001); + procedureBuilder_ = + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getProcedureFieldBuilder() : null; + } else { + procedureBuilder_.addAllMessages(other.procedure_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getProcedureCount(); i++) { + if (!getProcedure(i).isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List procedure_ = + java.util.Collections.emptyList(); + private void ensureProcedureIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + procedure_ = new java.util.ArrayList(procedure_); + bitField0_ |= 0x00000001; + } + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_; + + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public java.util.List getProcedureList() { + if (procedureBuilder_ == null) { + return java.util.Collections.unmodifiableList(procedure_); + } else { + return procedureBuilder_.getMessageList(); + } + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public int getProcedureCount() { + if (procedureBuilder_ == 
null) { + return procedure_.size(); + } else { + return procedureBuilder_.getCount(); + } + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) { + if (procedureBuilder_ == null) { + return procedure_.get(index); + } else { + return procedureBuilder_.getMessage(index); + } + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder setProcedure( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { + if (procedureBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureProcedureIsMutable(); + procedure_.set(index, value); + onChanged(); + } else { + procedureBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder setProcedure( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { + if (procedureBuilder_ == null) { + ensureProcedureIsMutable(); + procedure_.set(index, builderForValue.build()); + onChanged(); + } else { + procedureBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder addProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { + if (procedureBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureProcedureIsMutable(); + procedure_.add(value); + onChanged(); + } else { + procedureBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder addProcedure( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { + if (procedureBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureProcedureIsMutable(); + procedure_.add(index, value); + onChanged(); + } else { + procedureBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder addProcedure( + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { + if (procedureBuilder_ == null) { + ensureProcedureIsMutable(); + procedure_.add(builderForValue.build()); + onChanged(); + } else { + procedureBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder addProcedure( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { + if (procedureBuilder_ == null) { + ensureProcedureIsMutable(); + procedure_.add(index, builderForValue.build()); + onChanged(); + } else { + procedureBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder addAllProcedure( + java.lang.Iterable values) { + if (procedureBuilder_ == null) { + ensureProcedureIsMutable(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, procedure_); + onChanged(); + } else { + procedureBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder clearProcedure() { + if (procedureBuilder_ == null) { + procedure_ = java.util.Collections.emptyList(); + 
bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + procedureBuilder_.clear(); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public Builder removeProcedure(int index) { + if (procedureBuilder_ == null) { + ensureProcedureIsMutable(); + procedure_.remove(index); + onChanged(); + } else { + procedureBuilder_.remove(index); + } + return this; + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder( + int index) { + return getProcedureFieldBuilder().getBuilder(index); + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( + int index) { + if (procedureBuilder_ == null) { + return procedure_.get(index); } else { + return procedureBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public java.util.List + getProcedureOrBuilderList() { + if (procedureBuilder_ != null) { + return procedureBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(procedure_); + } + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() { + return getProcedureFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder( + int index) { + return getProcedureFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); + } + /** + * repeated .hbase.pb.Procedure procedure = 1; + */ + public java.util.List + getProcedureBuilderList() { + return getProcedureFieldBuilder().getBuilderList(); + } + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> + getProcedureFieldBuilder() { + if (procedureBuilder_ == null) { + procedureBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>( + procedure_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + procedure_ = null; + } + return procedureBuilder_; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresResponse) + } + + // 
@@protoc_insertion_point(class_scope:hbase.pb.ListProceduresResponse)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListProceduresResponse>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ListProceduresResponse>() {
+ public ListProceduresResponse parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new ListProceduresResponse(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListProceduresResponse> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListProceduresResponse> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
+
+ public interface ListLocksRequestOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.ListLocksRequest)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListLocksRequest}
+ */
+ public static final class ListLocksRequest extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:hbase.pb.ListLocksRequest)
+ ListLocksRequestOrBuilder {
+ // Use ListLocksRequest.newBuilder() to construct.
+ private ListLocksRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private ListLocksRequest() {
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListLocksRequest(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksRequest_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.Builder.class);
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ unknownFields.writeTo(output);
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest) obj;
+
+ boolean result = true;
+ result = result && unknownFields.equals(other.unknownFields);
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int
hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ListLocksRequest} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListLocksRequest) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequestOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksRequest_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksRequest_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest result = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest(this); + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.ListLocksRequest) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ListLocksRequest) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest getDefaultInstance() { + 
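+    // Reviewer note (comment added for this review, not protoc output): ListLocksRequest
+    // declares no fields, so the shared DEFAULT_INSTANCE singleton below is the entire
+    // request every caller sends; there is nothing to build per call.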
    return DEFAULT_INSTANCE;
+  }
+
+  @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListLocksRequest>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ListLocksRequest>() {
+    public ListLocksRequest parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new ListLocksRequest(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListLocksRequest> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListLocksRequest> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
+public interface ListLocksResponseOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:hbase.pb.ListLocksResponse)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <code>repeated .hbase.pb.Lock lock = 1;</code>
+   */
+  java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock>
+      getLockList();
+  /**
+   * <code>repeated .hbase.pb.Lock lock = 1;</code>
+   */
+  org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getLock(int index);
+  /**
+   * <code>repeated .hbase.pb.Lock lock = 1;</code>
+   */
+  int getLockCount();
+  /**
+   * <code>repeated .hbase.pb.Lock lock = 1;</code>
+   */
+  java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder>
+      getLockOrBuilderList();
+  /**
+   * <code>repeated .hbase.pb.Lock lock = 1;</code>
+   */
+  org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder getLockOrBuilder(
+      int index);
+}
+/**
+ * Protobuf type {@code hbase.pb.ListLocksResponse}
+ */
+public static final class ListLocksResponse extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:hbase.pb.ListLocksResponse)
+    ListLocksResponseOrBuilder {
+  // Use ListLocksResponse.newBuilder() to construct.
+ private ListLocksResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ListLocksResponse() { + lock_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListLocksResponse( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + lock_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + lock_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.PARSER, extensionRegistry)); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + lock_ = java.util.Collections.unmodifiableList(lock_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.Builder.class); + } + + public static final int LOCK_FIELD_NUMBER = 1; + private java.util.List lock_; + /** + * repeated .hbase.pb.Lock lock = 1; + */ + public java.util.List getLockList() { + return lock_; + } + /** + * repeated .hbase.pb.Lock lock = 1; + */ + public java.util.List + getLockOrBuilderList() { + return lock_; + } + /** + * repeated .hbase.pb.Lock lock = 1; + */ + public int getLockCount() { + return lock_.size(); + } + /** + * repeated .hbase.pb.Lock lock = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getLock(int index) { + return lock_.get(index); + } + /** + * repeated .hbase.pb.Lock lock = 1; + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder getLockOrBuilder( + int index) { + return lock_.get(index); + } + + private byte memoizedIsInitialized = -1; + public 
final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + for (int i = 0; i < getLockCount(); i++) { + if (!getLock(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < lock_.size(); i++) { + output.writeMessage(1, lock_.get(i)); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < lock_.size(); i++) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(1, lock_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse) obj; + + boolean result = true; + result = result && getLockList() + .equals(other.getLockList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getLockCount() > 0) { + hash = (37 * hash) + LOCK_FIELD_NUMBER; + hash = (53 * hash) + getLockList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ListLocksResponse} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ListLocksResponse) + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponseOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksResponse_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getLockFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (lockBuilder_ == null) { + lock_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + lockBuilder_.clear(); + } + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListLocksResponse_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse(this); + int from_bitField0_ = bitField0_; + if (lockBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + lock_ = java.util.Collections.unmodifiableList(lock_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.lock_ = lock_; + } else { + result.lock_ = lockBuilder_.build(); + } + 
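+        // Reviewer note (not protoc output): buildPartial() freezes the accumulated list at
+        // most once. Clearing the bit in bitField0_ above means a reused Builder will
+        // copy-on-write via ensureLockIsMutable() before its next mutation, so the message
+        // just built keeps an immutable view of lock_.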
onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse other) { - if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()) return this; - if (procedureBuilder_ == null) { - if (!other.procedure_.isEmpty()) { - if (procedure_.isEmpty()) { - procedure_ = other.procedure_; + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance()) return this; + if (lockBuilder_ == null) { + if (!other.lock_.isEmpty()) { + if (lock_.isEmpty()) { + lock_ = other.lock_; bitField0_ = (bitField0_ & ~0x00000001); } else { - ensureProcedureIsMutable(); - procedure_.addAll(other.procedure_); + ensureLockIsMutable(); + lock_.addAll(other.lock_); } onChanged(); } } else { - if (!other.procedure_.isEmpty()) { - if (procedureBuilder_.isEmpty()) { - procedureBuilder_.dispose(); - procedureBuilder_ = null; - procedure_ = other.procedure_; + if (!other.lock_.isEmpty()) { + if (lockBuilder_.isEmpty()) { + lockBuilder_.dispose(); + lockBuilder_ = null; + lock_ = other.lock_; bitField0_ = (bitField0_ & ~0x00000001); - procedureBuilder_ = + lockBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getProcedureFieldBuilder() : null; + getLockFieldBuilder() : null; } else { - procedureBuilder_.addAllMessages(other.procedure_); + lockBuilder_.addAllMessages(other.lock_); } } } @@ -61829,8 +62956,8 @@ public final class MasterProtos { } public final boolean isInitialized() { - for (int i = 0; i < getProcedureCount(); i++) { - if (!getProcedure(i).isInitialized()) { + for (int i = 0; i < getLockCount(); i++) { + if (!getLock(i).isInitialized()) { return false; } } @@ -61841,11 +62968,11 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parsedMessage = null; + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -61856,244 +62983,244 @@ public final class MasterProtos { } private int bitField0_; - private java.util.List procedure_ = + private java.util.List lock_ = java.util.Collections.emptyList(); - private void ensureProcedureIsMutable() { + private void ensureLockIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { - procedure_ = new java.util.ArrayList(procedure_); + lock_ = new java.util.ArrayList(lock_); bitField0_ |= 0x00000001; } } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_; + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder> lockBuilder_; /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public java.util.List getProcedureList() { - if (procedureBuilder_ == null) { - return java.util.Collections.unmodifiableList(procedure_); + public java.util.List getLockList() { + if (lockBuilder_ == null) { + return java.util.Collections.unmodifiableList(lock_); } else { - return procedureBuilder_.getMessageList(); + return lockBuilder_.getMessageList(); } } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public int getProcedureCount() { - if (procedureBuilder_ == null) { - return procedure_.size(); + public int getLockCount() { + if (lockBuilder_ == null) { + return lock_.size(); } else { - return procedureBuilder_.getCount(); + return lockBuilder_.getCount(); } } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) { - if (procedureBuilder_ == null) { - return 
procedure_.get(index); + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock getLock(int index) { + if (lockBuilder_ == null) { + return lock_.get(index); } else { - return procedureBuilder_.getMessage(index); + return lockBuilder_.getMessage(index); } } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder setProcedure( - int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { - if (procedureBuilder_ == null) { + public Builder setLock( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock value) { + if (lockBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureProcedureIsMutable(); - procedure_.set(index, value); + ensureLockIsMutable(); + lock_.set(index, value); onChanged(); } else { - procedureBuilder_.setMessage(index, value); + lockBuilder_.setMessage(index, value); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder setProcedure( - int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { - if (procedureBuilder_ == null) { - ensureProcedureIsMutable(); - procedure_.set(index, builderForValue.build()); + public Builder setLock( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder builderForValue) { + if (lockBuilder_ == null) { + ensureLockIsMutable(); + lock_.set(index, builderForValue.build()); onChanged(); } else { - procedureBuilder_.setMessage(index, builderForValue.build()); + lockBuilder_.setMessage(index, builderForValue.build()); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder addProcedure(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { - if (procedureBuilder_ == null) { + public Builder addLock(org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock value) { + if (lockBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureProcedureIsMutable(); - procedure_.add(value); + ensureLockIsMutable(); + lock_.add(value); onChanged(); } else { - procedureBuilder_.addMessage(value); + lockBuilder_.addMessage(value); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder addProcedure( - int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure value) { - if (procedureBuilder_ == null) { + public Builder addLock( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock value) { + if (lockBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - ensureProcedureIsMutable(); - procedure_.add(index, value); + ensureLockIsMutable(); + lock_.add(index, value); onChanged(); } else { - procedureBuilder_.addMessage(index, value); + lockBuilder_.addMessage(index, value); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder addProcedure( - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { - if (procedureBuilder_ == null) { - ensureProcedureIsMutable(); - procedure_.add(builderForValue.build()); + public Builder addLock( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder builderForValue) { + if (lockBuilder_ == null) { + 
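+          // Reviewer note (not protoc output): before the first mutation lock_ may still be
+          // the immutable emptyList()/unmodifiableList shared with built messages, so
+          // ensureLockIsMutable() swaps in a private ArrayList before the add below.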
ensureLockIsMutable(); + lock_.add(builderForValue.build()); onChanged(); } else { - procedureBuilder_.addMessage(builderForValue.build()); + lockBuilder_.addMessage(builderForValue.build()); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder addProcedure( - int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) { - if (procedureBuilder_ == null) { - ensureProcedureIsMutable(); - procedure_.add(index, builderForValue.build()); + public Builder addLock( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder builderForValue) { + if (lockBuilder_ == null) { + ensureLockIsMutable(); + lock_.add(index, builderForValue.build()); onChanged(); } else { - procedureBuilder_.addMessage(index, builderForValue.build()); + lockBuilder_.addMessage(index, builderForValue.build()); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder addAllProcedure( - java.lang.Iterable values) { - if (procedureBuilder_ == null) { - ensureProcedureIsMutable(); + public Builder addAllLock( + java.lang.Iterable values) { + if (lockBuilder_ == null) { + ensureLockIsMutable(); org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, procedure_); + values, lock_); onChanged(); } else { - procedureBuilder_.addAllMessages(values); + lockBuilder_.addAllMessages(values); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder clearProcedure() { - if (procedureBuilder_ == null) { - procedure_ = java.util.Collections.emptyList(); + public Builder clearLock() { + if (lockBuilder_ == null) { + lock_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { - procedureBuilder_.clear(); + lockBuilder_.clear(); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public Builder removeProcedure(int index) { - if (procedureBuilder_ == null) { - ensureProcedureIsMutable(); - procedure_.remove(index); + public Builder removeLock(int index) { + if (lockBuilder_ == null) { + ensureLockIsMutable(); + lock_.remove(index); onChanged(); } else { - procedureBuilder_.remove(index); + lockBuilder_.remove(index); } return this; } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder( + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder getLockBuilder( int index) { - return getProcedureFieldBuilder().getBuilder(index); + return getLockFieldBuilder().getBuilder(index); } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder( + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder getLockOrBuilder( int index) { - if (procedureBuilder_ == null) { - return procedure_.get(index); } else { - return procedureBuilder_.getMessageOrBuilder(index); + if (lockBuilder_ == null) { + return lock_.get(index); } else { + return lockBuilder_.getMessageOrBuilder(index); } } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - 
public java.util.List - getProcedureOrBuilderList() { - if (procedureBuilder_ != null) { - return procedureBuilder_.getMessageOrBuilderList(); + public java.util.List + getLockOrBuilderList() { + if (lockBuilder_ != null) { + return lockBuilder_.getMessageOrBuilderList(); } else { - return java.util.Collections.unmodifiableList(procedure_); + return java.util.Collections.unmodifiableList(lock_); } } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() { - return getProcedureFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder addLockBuilder() { + return getLockFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.getDefaultInstance()); } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder( + public org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder addLockBuilder( int index) { - return getProcedureFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()); + return getLockFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.getDefaultInstance()); } /** - * repeated .hbase.pb.Procedure procedure = 1; + * repeated .hbase.pb.Lock lock = 1; */ - public java.util.List - getProcedureBuilderList() { - return getProcedureFieldBuilder().getBuilderList(); + public java.util.List + getLockBuilderList() { + return getLockFieldBuilder().getBuilderList(); } private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> - getProcedureFieldBuilder() { - if (procedureBuilder_ == null) { - procedureBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>( - procedure_, + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder> + getLockFieldBuilder() { + if (lockBuilder_ == null) { + lockBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.Lock.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.LockOrBuilder>( + lock_, ((bitField0_ & 0x00000001) == 0x00000001), getParentForChildren(), isClean()); - procedure_ = null; + lock_ = null; } - return procedureBuilder_; + return lockBuilder_; } public final Builder setUnknownFields( final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { @@ -62106,39 +63233,39 @@ public final class MasterProtos { } - // @@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.ListLocksResponse) } - // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresResponse) - private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:hbase.pb.ListLocksResponse) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse(); } - public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstance() { + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse getDefaultInstance() { return DEFAULT_INSTANCE; } - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public ListProceduresResponse parsePartialFrom( + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { + public ListLocksResponse parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new ListProceduresResponse(input, extensionRegistry); + return new ListLocksResponse(input, extensionRegistry); } }; - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } - public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() { + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -70510,7 +71637,7 @@ public final class MasterProtos { /** *
-       ** Get a run of the CleanerChore
+       ** Get a run of the CleanerChore 
        * 
* * rpc RunCleanerChore(.hbase.pb.RunCleanerChoreRequest) returns (.hbase.pb.RunCleanerChoreResponse); @@ -70841,6 +71968,14 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); /** + * rpc ListLocks(.hbase.pb.ListLocksRequest) returns (.hbase.pb.ListLocksResponse); + */ + public abstract void listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** *
        ** Add a replication peer 
        * 
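The hunks above declare the new ListLocks rpc on the service interface; the hunks below wire it into the reflective dispatcher at method index 60. For orientation, a minimal sketch of how a caller drives the new RPC once stubs are regenerated; the class name, the null RpcController, and the already-connected `master` stub are illustrative assumptions, mirroring the `stub.listLocks(controller, request)` call this patch adds on the client side:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;

    public final class ListLocksSketch {
      // Fetches the master's current lock list and prints each entry.
      static void dumpLocks(MasterProtos.MasterService.BlockingInterface master)
          throws ServiceException {
        // ListLocksRequest has no fields; the shared default instance is the whole request.
        MasterProtos.ListLocksResponse response =
            master.listLocks(null, MasterProtos.ListLocksRequest.getDefaultInstance());
        for (LockProtos.Lock lock : response.getLockList()) {
          // Default TextFormat rendering of the lock's fields.
          System.out.println(lock);
        }
      }
    }

The repeated `lock` field arrives as an immutable list, so the response can be handed around without defensive copies.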
@@ -71446,6 +72581,14 @@ public final class MasterProtos { } @java.lang.Override + public void listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + impl.listLocks(controller, request, done); + } + + @java.lang.Override public void addReplicationPeer( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request, @@ -71668,24 +72811,26 @@ public final class MasterProtos { case 59: return impl.listProcedures(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest)request); case 60: - return impl.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request); + return impl.listLocks(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest)request); case 61: - return impl.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request); + return impl.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request); case 62: - return impl.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request); + return impl.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request); case 63: - return impl.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request); + return impl.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request); case 64: - return impl.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request); + return impl.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request); case 65: - return impl.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request); + return impl.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request); case 66: - return impl.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request); + return impl.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request); case 67: - return impl.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request); + return impl.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request); case 68: - return impl.drainRegionServers(controller, 
(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request); + return impl.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request); case 69: + return impl.drainRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request); + case 70: return impl.removeDrainFromRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest)request); default: throw new java.lang.AssertionError("Can't get here."); @@ -71822,24 +72967,26 @@ public final class MasterProtos { case 59: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); case 66: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); case 67: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); case 68: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); case 69: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + case 70: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -71976,24 
+73123,26 @@ public final class MasterProtos { case 59: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); case 66: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); case 67: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); case 68: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); case 69: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + case 70: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -72424,7 +73573,7 @@ public final class MasterProtos { /** *
-     ** Get a run of the CleanerChore
+     ** Get a run of the CleanerChore 
      * 
* * rpc RunCleanerChore(.hbase.pb.RunCleanerChoreRequest) returns (.hbase.pb.RunCleanerChoreResponse); @@ -72755,6 +73904,14 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); /** + * rpc ListLocks(.hbase.pb.ListLocksRequest) returns (.hbase.pb.ListLocksResponse); + */ + public abstract void listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done); + + /** *
      ** Add a replication peer 
      * 
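The case-label renumbering below is the mechanical consequence of inserting ListLocks at position 60 of Master.proto: generic-service dispatch keys on a method's positional index, so AddReplicationPeer through RemoveDrainFromRegionServers each shift up by one (indices 60..69 become 61..70), and client and server must be regenerated from the same proto or calls get routed to the wrong method. A small sketch (class name and printed expectations are illustrative) that makes the shift observable through the service descriptor rather than hard-coded ordinals:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;

    public final class MethodIndexSketch {
      public static void main(String[] args) {
        Descriptors.ServiceDescriptor svc = MasterProtos.MasterService.getDescriptor();
        // Expected with this patch applied: ListLocks -> 60, AddReplicationPeer -> 61,
        // RemoveDrainFromRegionServers -> 70 (previously 60 and 69).
        for (String name : new String[] {
            "ListLocks", "AddReplicationPeer", "RemoveDrainFromRegionServers"}) {
          Descriptors.MethodDescriptor method = svc.findMethodByName(name);
          System.out.println(name + " -> " + method.getIndex());
        }
      }
    }

Resolving methods by name this way sidesteps the ordinal shift entirely, which is why only the generated switch tables, and no hand-written code, need to change in these hunks.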
@@ -73197,51 +74354,56 @@ public final class MasterProtos { done)); return; case 60: + this.listLocks(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest)request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 61: this.addReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 61: + case 62: this.removeReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 62: + case 63: this.enableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 63: + case 64: this.disableReplicationPeer(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 64: + case 65: this.getReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 65: + case 66: this.updateReplicationPeerConfig(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 66: + case 67: this.listReplicationPeers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 67: + case 68: this.listDrainingRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 68: + case 69: this.drainRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 69: + case 70: this.removeDrainFromRegionServers(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest)request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); @@ -73381,24 +74543,26 @@ public final class MasterProtos { case 59: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); + return 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest.getDefaultInstance(); case 64: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest.getDefaultInstance(); case 66: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest.getDefaultInstance(); case 67: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest.getDefaultInstance(); case 68: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest.getDefaultInstance(); case 69: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest.getDefaultInstance(); + case 70: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -73535,24 +74699,26 @@ public final class MasterProtos { case 59: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(); case 60: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance(); case 61: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(); case 62: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(); case 63: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(); case 64: - 
return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(); case 65: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(); case 66: - return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(); case 67: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(); case 68: - return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(); case 69: + return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(); + case 70: return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -74475,12 +75641,27 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance())); } + public void listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request, + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(60), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance(), + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.class, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance())); + } + public void addReplicationPeer( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(60), + getDescriptor().getMethods().get(61), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance(), @@ -74495,7 +75676,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(61), + getDescriptor().getMethods().get(62), controller, request, 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance(), @@ -74510,7 +75691,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(62), + getDescriptor().getMethods().get(63), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance(), @@ -74525,7 +75706,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(63), + getDescriptor().getMethods().get(64), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance(), @@ -74540,7 +75721,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(64), + getDescriptor().getMethods().get(65), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance(), @@ -74555,7 +75736,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(65), + getDescriptor().getMethods().get(66), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance(), @@ -74570,7 +75751,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(66), + getDescriptor().getMethods().get(67), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance(), @@ -74585,7 +75766,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(67), + getDescriptor().getMethods().get(68), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance(), @@ -74600,7 +75781,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(68), + getDescriptor().getMethods().get(69), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance(), @@ -74615,7 +75796,7 @@ public final class MasterProtos { 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest request, org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(69), + getDescriptor().getMethods().get(70), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance(), @@ -74932,6 +76113,11 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse addReplicationPeer( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request) @@ -75710,12 +76896,24 @@ public final class MasterProtos { } + public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse listLocks( + org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest request) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(60), + controller, + request, + org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse.getDefaultInstance()); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse addReplicationPeer( org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(60), + getDescriptor().getMethods().get(61), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse.getDefaultInstance()); @@ -75727,7 +76925,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(61), + getDescriptor().getMethods().get(62), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse.getDefaultInstance()); @@ -75739,7 +76937,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest request) throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(62), + getDescriptor().getMethods().get(63), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse.getDefaultInstance()); @@ -75751,7 +76949,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(63), + getDescriptor().getMethods().get(64), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse.getDefaultInstance()); @@ -75763,7 +76961,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(64), + getDescriptor().getMethods().get(65), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse.getDefaultInstance()); @@ -75775,7 +76973,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(65), + getDescriptor().getMethods().get(66), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse.getDefaultInstance()); @@ -75787,7 +76985,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(66), + getDescriptor().getMethods().get(67), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse.getDefaultInstance()); @@ -75799,7 +76997,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(67), + getDescriptor().getMethods().get(68), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse.getDefaultInstance()); @@ -75811,7 +77009,7 @@ public final class MasterProtos { 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(68), + getDescriptor().getMethods().get(69), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse.getDefaultInstance()); @@ -75823,7 +77021,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest request) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(69), + getDescriptor().getMethods().get(70), controller, request, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse.getDefaultInstance()); @@ -76186,32 +77384,32 @@ public final class MasterProtos { internal_static_hbase_pb_IsCatalogJanitorEnabledResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RunCleanerChoreRequest_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RunCleanerChoreRequest_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RunCleanerChoreResponse_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_RunCleanerChoreResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetCleanerChoreRunningRequest_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetCleanerChoreRunningRequest_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetCleanerChoreRunningResponse_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_SetCleanerChoreRunningResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsCleanerChoreEnabledRequest_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsCleanerChoreEnabledRequest_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_IsCleanerChoreEnabledResponse_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_IsCleanerChoreEnabledResponse_fieldAccessorTable; private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor @@ -76385,6 +77583,16 @@ public final class MasterProtos { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_ListLocksRequest_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_ListLocksRequest_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_ListLocksResponse_descriptor; + private static final + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_hbase_pb_ListLocksResponse_fieldAccessorTable; + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_SetQuotaRequest_descriptor; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable @@ -76460,383 +77668,387 @@ public final class MasterProtos { java.lang.String[] descriptorData = { "\n\014Master.proto\022\010hbase.pb\032\013HBase.proto\032\014C" + "lient.proto\032\023ClusterStatus.proto\032\023ErrorH" + - "andling.proto\032\017Procedure.proto\032\013Quota.pr" + - "oto\032\021Replication.proto\"\234\001\n\020AddColumnRequ" + - "est\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Table" + - "Name\0225\n\017column_families\030\002 \002(\0132\034.hbase.pb" + - ".ColumnFamilySchema\022\026\n\013nonce_group\030\003 \001(\004" + - ":\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"$\n\021AddColumnRespon" + - "se\022\017\n\007proc_id\030\001 \001(\004\"}\n\023DeleteColumnReque" + - "st\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableN", - "ame\022\023\n\013column_name\030\002 \002(\014\022\026\n\013nonce_group\030" + - "\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\'\n\024DeleteColu" + - "mnResponse\022\017\n\007proc_id\030\001 \001(\004\"\237\001\n\023ModifyCo" + - "lumnRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase." + - "pb.TableName\0225\n\017column_families\030\002 \002(\0132\034." + - "hbase.pb.ColumnFamilySchema\022\026\n\013nonce_gro" + - "up\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\'\n\024ModifyC" + - "olumnResponse\022\017\n\007proc_id\030\001 \001(\004\"n\n\021MoveRe" + - "gionRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.R" + - "egionSpecifier\022.\n\020dest_server_name\030\002 \001(\013", - "2\024.hbase.pb.ServerName\"\024\n\022MoveRegionResp" + - "onse\"\210\001\n\030MergeTableRegionsRequest\022)\n\006reg" + - "ion\030\001 \003(\0132\031.hbase.pb.RegionSpecifier\022\027\n\010" + - "forcible\030\003 \001(\010:\005false\022\026\n\013nonce_group\030\004 \001" + - "(\004:\0010\022\020\n\005nonce\030\005 \001(\004:\0010\",\n\031MergeTableReg" + - "ionsResponse\022\017\n\007proc_id\030\001 \001(\004\"@\n\023AssignR" + - "egionRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb." 
+ - "RegionSpecifier\"\026\n\024AssignRegionResponse\"" + - "X\n\025UnassignRegionRequest\022)\n\006region\030\001 \002(\013" + - "2\031.hbase.pb.RegionSpecifier\022\024\n\005force\030\002 \001", - "(\010:\005false\"\030\n\026UnassignRegionResponse\"A\n\024O" + - "fflineRegionRequest\022)\n\006region\030\001 \002(\0132\031.hb" + - "ase.pb.RegionSpecifier\"\027\n\025OfflineRegionR" + - "esponse\"\177\n\022CreateTableRequest\022+\n\014table_s" + - "chema\030\001 \002(\0132\025.hbase.pb.TableSchema\022\022\n\nsp" + - "lit_keys\030\002 \003(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020" + - "\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTableResponse\022" + - "\017\n\007proc_id\030\001 \001(\004\"g\n\022DeleteTableRequest\022\'" + - "\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableName\022" + - "\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\001", - "0\"&\n\023DeleteTableResponse\022\017\n\007proc_id\030\001 \001(" + - "\004\"\207\001\n\024TruncateTableRequest\022&\n\ttableName\030" + - "\001 \002(\0132\023.hbase.pb.TableName\022\035\n\016preserveSp" + - "lits\030\002 \001(\010:\005false\022\026\n\013nonce_group\030\003 \001(\004:\001" + - "0\022\020\n\005nonce\030\004 \001(\004:\0010\"(\n\025TruncateTableResp" + - "onse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022EnableTableRequ" + - "est\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Table" + - "Name\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 " + - "\001(\004:\0010\"&\n\023EnableTableResponse\022\017\n\007proc_id" + - "\030\001 \001(\004\"h\n\023DisableTableRequest\022\'\n\ntable_n", - "ame\030\001 \002(\0132\023.hbase.pb.TableName\022\026\n\013nonce_" + - "group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\'\n\024Disa" + - "bleTableResponse\022\017\n\007proc_id\030\001 \001(\004\"\224\001\n\022Mo" + - "difyTableRequest\022\'\n\ntable_name\030\001 \002(\0132\023.h" + - "base.pb.TableName\022+\n\014table_schema\030\002 \002(\0132" + - "\025.hbase.pb.TableSchema\022\026\n\013nonce_group\030\003 " + - "\001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023ModifyTableR" + - "esponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\026CreateNamesp" + - "aceRequest\022:\n\023namespaceDescriptor\030\001 \002(\0132" + - "\035.hbase.pb.NamespaceDescriptor\022\026\n\013nonce_", - "group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n\027Crea" + - "teNamespaceResponse\022\017\n\007proc_id\030\001 \001(\004\"Y\n\026" + - "DeleteNamespaceRequest\022\025\n\rnamespaceName\030" + - "\001 \002(\t\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003" + - " \001(\004:\0010\"*\n\027DeleteNamespaceResponse\022\017\n\007pr" + - "oc_id\030\001 \001(\004\"~\n\026ModifyNamespaceRequest\022:\n" + - "\023namespaceDescriptor\030\001 \002(\0132\035.hbase.pb.Na" + - "mespaceDescriptor\022\026\n\013nonce_group\030\002 \001(\004:\001" + - "0\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n\027ModifyNamespaceRe" + - "sponse\022\017\n\007proc_id\030\001 \001(\004\"6\n\035GetNamespaceD", - "escriptorRequest\022\025\n\rnamespaceName\030\001 \002(\t\"" + - "\\\n\036GetNamespaceDescriptorResponse\022:\n\023nam" + - "espaceDescriptor\030\001 \002(\0132\035.hbase.pb.Namesp" + - "aceDescriptor\"!\n\037ListNamespaceDescriptor" + - "sRequest\"^\n ListNamespaceDescriptorsResp" + - "onse\022:\n\023namespaceDescriptor\030\001 \003(\0132\035.hbas" + - "e.pb.NamespaceDescriptor\"?\n&ListTableDes" + - 
"criptorsByNamespaceRequest\022\025\n\rnamespaceN" + - "ame\030\001 \002(\t\"U\n\'ListTableDescriptorsByNames" + - "paceResponse\022*\n\013tableSchema\030\001 \003(\0132\025.hbas", - "e.pb.TableSchema\"9\n ListTableNamesByName" + - "spaceRequest\022\025\n\rnamespaceName\030\001 \002(\t\"K\n!L" + - "istTableNamesByNamespaceResponse\022&\n\ttabl" + - "eName\030\001 \003(\0132\023.hbase.pb.TableName\"\021\n\017Shut" + - "downRequest\"\022\n\020ShutdownResponse\"\023\n\021StopM" + - "asterRequest\"\024\n\022StopMasterResponse\"\034\n\032Is" + - "InMaintenanceModeRequest\"8\n\033IsInMaintena" + - "nceModeResponse\022\031\n\021inMaintenanceMode\030\001 \002" + - "(\010\"\037\n\016BalanceRequest\022\r\n\005force\030\001 \001(\010\"\'\n\017B" + - "alanceResponse\022\024\n\014balancer_ran\030\001 \002(\010\"<\n\031", - "SetBalancerRunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n" + - "\013synchronous\030\002 \001(\010\"8\n\032SetBalancerRunning" + - "Response\022\032\n\022prev_balance_value\030\001 \001(\010\"\032\n\030" + - "IsBalancerEnabledRequest\",\n\031IsBalancerEn" + - "abledResponse\022\017\n\007enabled\030\001 \002(\010\"w\n\035SetSpl" + - "itOrMergeEnabledRequest\022\017\n\007enabled\030\001 \002(\010" + - "\022\023\n\013synchronous\030\002 \001(\010\0220\n\014switch_types\030\003 " + - "\003(\0162\032.hbase.pb.MasterSwitchType\"4\n\036SetSp" + - "litOrMergeEnabledResponse\022\022\n\nprev_value\030" + - "\001 \003(\010\"O\n\034IsSplitOrMergeEnabledRequest\022/\n", - "\013switch_type\030\001 \002(\0162\032.hbase.pb.MasterSwit" + - "chType\"0\n\035IsSplitOrMergeEnabledResponse\022" + - "\017\n\007enabled\030\001 \002(\010\"\022\n\020NormalizeRequest\"+\n\021" + - "NormalizeResponse\022\026\n\016normalizer_ran\030\001 \002(" + - "\010\")\n\033SetNormalizerRunningRequest\022\n\n\002on\030\001" + - " \002(\010\"=\n\034SetNormalizerRunningResponse\022\035\n\025" + - "prev_normalizer_value\030\001 \001(\010\"\034\n\032IsNormali" + - "zerEnabledRequest\".\n\033IsNormalizerEnabled" + - "Response\022\017\n\007enabled\030\001 \002(\010\"\027\n\025RunCatalogS" + - "canRequest\"-\n\026RunCatalogScanResponse\022\023\n\013", - "scan_result\030\001 \001(\005\"-\n\033EnableCatalogJanito" + - "rRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034EnableCatalo" + - "gJanitorResponse\022\022\n\nprev_value\030\001 \001(\010\" \n\036" + - "IsCatalogJanitorEnabledRequest\"0\n\037IsCata" + - "logJanitorEnabledResponse\022\r\n\005value\030\001 \002(\010" + - "\"\030\n\026RunCleanerChoreRequest\"4\n\027RunCleaner" + - "ChoreResponse\022\031\n\021cleaner_chore_ran\030\001 \002(\010" + - "\"+\n\035SetCleanerChoreRunningRequest\022\n\n\002on\030" + - "\001 \002(\010\"4\n\036SetCleanerChoreRunningResponse\022" + - "\022\n\nprev_value\030\001 \001(\010\"\036\n\034IsCleanerChoreEna", - "bledRequest\".\n\035IsCleanerChoreEnabledResp" + - "onse\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotRequest\022/" + - "\n\010snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDesc" + - "ription\",\n\020SnapshotResponse\022\030\n\020expected_" + - "timeout\030\001 \002(\003\"\036\n\034GetCompletedSnapshotsRe" + - "quest\"Q\n\035GetCompletedSnapshotsResponse\0220" + - "\n\tsnapshots\030\001 \003(\0132\035.hbase.pb.SnapshotDes" + - "cription\"H\n\025DeleteSnapshotRequest\022/\n\010sna" + - "pshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescripti" + - "on\"\030\n\026DeleteSnapshotResponse\"s\n\026RestoreS", - "napshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase" + - 
".pb.SnapshotDescription\022\026\n\013nonce_group\030\002" + - " \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n\027RestoreSnap" + - "shotResponse\022\017\n\007proc_id\030\001 \002(\004\"H\n\025IsSnaps" + - "hotDoneRequest\022/\n\010snapshot\030\001 \001(\0132\035.hbase" + - ".pb.SnapshotDescription\"^\n\026IsSnapshotDon" + - "eResponse\022\023\n\004done\030\001 \001(\010:\005false\022/\n\010snapsh" + - "ot\030\002 \001(\0132\035.hbase.pb.SnapshotDescription\"" + - "O\n\034IsRestoreSnapshotDoneRequest\022/\n\010snaps" + - "hot\030\001 \001(\0132\035.hbase.pb.SnapshotDescription", - "\"4\n\035IsRestoreSnapshotDoneResponse\022\023\n\004don" + - "e\030\001 \001(\010:\005false\"F\n\033GetSchemaAlterStatusRe" + - "quest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Tab" + - "leName\"T\n\034GetSchemaAlterStatusResponse\022\035" + - "\n\025yet_to_update_regions\030\001 \001(\r\022\025\n\rtotal_r" + - "egions\030\002 \001(\r\"\213\001\n\032GetTableDescriptorsRequ" + - "est\022(\n\013table_names\030\001 \003(\0132\023.hbase.pb.Tabl" + - "eName\022\r\n\005regex\030\002 \001(\t\022!\n\022include_sys_tabl" + - "es\030\003 \001(\010:\005false\022\021\n\tnamespace\030\004 \001(\t\"J\n\033Ge" + - "tTableDescriptorsResponse\022+\n\014table_schem", - "a\030\001 \003(\0132\025.hbase.pb.TableSchema\"[\n\024GetTab" + - "leNamesRequest\022\r\n\005regex\030\001 \001(\t\022!\n\022include" + - "_sys_tables\030\002 \001(\010:\005false\022\021\n\tnamespace\030\003 " + - "\001(\t\"A\n\025GetTableNamesResponse\022(\n\013table_na" + - "mes\030\001 \003(\0132\023.hbase.pb.TableName\"?\n\024GetTab" + - "leStateRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hba" + - "se.pb.TableName\"B\n\025GetTableStateResponse" + - "\022)\n\013table_state\030\001 \002(\0132\024.hbase.pb.TableSt" + - "ate\"\031\n\027GetClusterStatusRequest\"K\n\030GetClu" + - "sterStatusResponse\022/\n\016cluster_status\030\001 \002", - "(\0132\027.hbase.pb.ClusterStatus\"\030\n\026IsMasterR" + - "unningRequest\"4\n\027IsMasterRunningResponse" + - "\022\031\n\021is_master_running\030\001 \002(\010\"I\n\024ExecProce" + - "dureRequest\0221\n\tprocedure\030\001 \002(\0132\036.hbase.p" + - "b.ProcedureDescription\"F\n\025ExecProcedureR" + - "esponse\022\030\n\020expected_timeout\030\001 \001(\003\022\023\n\013ret" + - "urn_data\030\002 \001(\014\"K\n\026IsProcedureDoneRequest" + - "\0221\n\tprocedure\030\001 \001(\0132\036.hbase.pb.Procedure" + - "Description\"`\n\027IsProcedureDoneResponse\022\023" + - "\n\004done\030\001 \001(\010:\005false\0220\n\010snapshot\030\002 \001(\0132\036.", - "hbase.pb.ProcedureDescription\",\n\031GetProc" + - "edureResultRequest\022\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032" + - "GetProcedureResultResponse\0229\n\005state\030\001 \002(" + - "\0162*.hbase.pb.GetProcedureResultResponse." 
+ - "State\022\022\n\nstart_time\030\002 \001(\004\022\023\n\013last_update" + - "\030\003 \001(\004\022\016\n\006result\030\004 \001(\014\0224\n\texception\030\005 \001(" + - "\0132!.hbase.pb.ForeignExceptionMessage\"1\n\005" + - "State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FIN" + - "ISHED\020\002\"M\n\025AbortProcedureRequest\022\017\n\007proc" + - "_id\030\001 \002(\004\022#\n\025mayInterruptIfRunning\030\002 \001(\010", - ":\004true\"6\n\026AbortProcedureResponse\022\034\n\024is_p" + - "rocedure_aborted\030\001 \002(\010\"\027\n\025ListProcedures" + - "Request\"@\n\026ListProceduresResponse\022&\n\tpro" + - "cedure\030\001 \003(\0132\023.hbase.pb.Procedure\"\315\001\n\017Se" + - "tQuotaRequest\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser" + - "_group\030\002 \001(\t\022\021\n\tnamespace\030\003 \001(\t\022\'\n\ntable" + - "_name\030\004 \001(\0132\023.hbase.pb.TableName\022\022\n\nremo" + - "ve_all\030\005 \001(\010\022\026\n\016bypass_globals\030\006 \001(\010\022+\n\010" + - "throttle\030\007 \001(\0132\031.hbase.pb.ThrottleReques" + - "t\"\022\n\020SetQuotaResponse\"J\n\037MajorCompaction", - "TimestampRequest\022\'\n\ntable_name\030\001 \002(\0132\023.h" + - "base.pb.TableName\"U\n(MajorCompactionTime" + - "stampForRegionRequest\022)\n\006region\030\001 \002(\0132\031." + - "hbase.pb.RegionSpecifier\"@\n MajorCompact" + - "ionTimestampResponse\022\034\n\024compaction_times" + - "tamp\030\001 \002(\003\"\035\n\033SecurityCapabilitiesReques" + - "t\"\354\001\n\034SecurityCapabilitiesResponse\022G\n\014ca" + - "pabilities\030\001 \003(\01621.hbase.pb.SecurityCapa" + - "bilitiesResponse.Capability\"\202\001\n\nCapabili" + - "ty\022\031\n\025SIMPLE_AUTHENTICATION\020\000\022\031\n\025SECURE_", - "AUTHENTICATION\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022C" + - "ELL_AUTHORIZATION\020\003\022\023\n\017CELL_VISIBILITY\020\004" + - "\"\"\n ListDrainingRegionServersRequest\"N\n!" + - "ListDrainingRegionServersResponse\022)\n\013ser" + - "ver_name\030\001 \003(\0132\024.hbase.pb.ServerName\"F\n\031" + - "DrainRegionServersRequest\022)\n\013server_name" + - "\030\001 \003(\0132\024.hbase.pb.ServerName\"\034\n\032DrainReg" + - "ionServersResponse\"P\n#RemoveDrainFromReg" + - "ionServersRequest\022)\n\013server_name\030\001 \003(\0132\024" + - ".hbase.pb.ServerName\"&\n$RemoveDrainFromR", - "egionServersResponse*(\n\020MasterSwitchType" + - "\022\t\n\005SPLIT\020\000\022\t\n\005MERGE\020\0012\3013\n\rMasterService" + - "\022e\n\024GetSchemaAlterStatus\022%.hbase.pb.GetS" + - "chemaAlterStatusRequest\032&.hbase.pb.GetSc" + - "hemaAlterStatusResponse\022b\n\023GetTableDescr" + - "iptors\022$.hbase.pb.GetTableDescriptorsReq" + - "uest\032%.hbase.pb.GetTableDescriptorsRespo" + - "nse\022P\n\rGetTableNames\022\036.hbase.pb.GetTable" + - "NamesRequest\032\037.hbase.pb.GetTableNamesRes" + - "ponse\022Y\n\020GetClusterStatus\022!.hbase.pb.Get", - "ClusterStatusRequest\032\".hbase.pb.GetClust" + - "erStatusResponse\022V\n\017IsMasterRunning\022 .hb" + - "ase.pb.IsMasterRunningRequest\032!.hbase.pb" + - ".IsMasterRunningResponse\022D\n\tAddColumn\022\032." 
+ - "hbase.pb.AddColumnRequest\032\033.hbase.pb.Add" + - "ColumnResponse\022M\n\014DeleteColumn\022\035.hbase.p" + - "b.DeleteColumnRequest\032\036.hbase.pb.DeleteC" + - "olumnResponse\022M\n\014ModifyColumn\022\035.hbase.pb" + - ".ModifyColumnRequest\032\036.hbase.pb.ModifyCo" + - "lumnResponse\022G\n\nMoveRegion\022\033.hbase.pb.Mo", - "veRegionRequest\032\034.hbase.pb.MoveRegionRes" + - "ponse\022\\\n\021MergeTableRegions\022\".hbase.pb.Me" + - "rgeTableRegionsRequest\032#.hbase.pb.MergeT" + - "ableRegionsResponse\022M\n\014AssignRegion\022\035.hb" + - "ase.pb.AssignRegionRequest\032\036.hbase.pb.As" + - "signRegionResponse\022S\n\016UnassignRegion\022\037.h" + - "base.pb.UnassignRegionRequest\032 .hbase.pb" + - ".UnassignRegionResponse\022P\n\rOfflineRegion" + - "\022\036.hbase.pb.OfflineRegionRequest\032\037.hbase" + - ".pb.OfflineRegionResponse\022J\n\013DeleteTable", - "\022\034.hbase.pb.DeleteTableRequest\032\035.hbase.p" + - "b.DeleteTableResponse\022P\n\rtruncateTable\022\036" + - ".hbase.pb.TruncateTableRequest\032\037.hbase.p" + - "b.TruncateTableResponse\022J\n\013EnableTable\022\034" + - ".hbase.pb.EnableTableRequest\032\035.hbase.pb." + - "EnableTableResponse\022M\n\014DisableTable\022\035.hb" + - "ase.pb.DisableTableRequest\032\036.hbase.pb.Di" + - "sableTableResponse\022J\n\013ModifyTable\022\034.hbas" + - "e.pb.ModifyTableRequest\032\035.hbase.pb.Modif" + - "yTableResponse\022J\n\013CreateTable\022\034.hbase.pb", - ".CreateTableRequest\032\035.hbase.pb.CreateTab" + - "leResponse\022A\n\010Shutdown\022\031.hbase.pb.Shutdo" + - "wnRequest\032\032.hbase.pb.ShutdownResponse\022G\n" + - "\nStopMaster\022\033.hbase.pb.StopMasterRequest" + - "\032\034.hbase.pb.StopMasterResponse\022h\n\031IsMast" + - "erInMaintenanceMode\022$.hbase.pb.IsInMaint" + - "enanceModeRequest\032%.hbase.pb.IsInMainten" + - "anceModeResponse\022>\n\007Balance\022\030.hbase.pb.B" + - "alanceRequest\032\031.hbase.pb.BalanceResponse" + - "\022_\n\022SetBalancerRunning\022#.hbase.pb.SetBal", - "ancerRunningRequest\032$.hbase.pb.SetBalanc" + - "erRunningResponse\022\\\n\021IsBalancerEnabled\022\"" + - ".hbase.pb.IsBalancerEnabledRequest\032#.hba" + - "se.pb.IsBalancerEnabledResponse\022k\n\026SetSp" + - "litOrMergeEnabled\022\'.hbase.pb.SetSplitOrM" + - "ergeEnabledRequest\032(.hbase.pb.SetSplitOr" + - "MergeEnabledResponse\022h\n\025IsSplitOrMergeEn" + - "abled\022&.hbase.pb.IsSplitOrMergeEnabledRe" + - "quest\032\'.hbase.pb.IsSplitOrMergeEnabledRe" + - "sponse\022D\n\tNormalize\022\032.hbase.pb.Normalize", - "Request\032\033.hbase.pb.NormalizeResponse\022e\n\024" + - "SetNormalizerRunning\022%.hbase.pb.SetNorma" + - "lizerRunningRequest\032&.hbase.pb.SetNormal" + - "izerRunningResponse\022b\n\023IsNormalizerEnabl" + - "ed\022$.hbase.pb.IsNormalizerEnabledRequest" + - "\032%.hbase.pb.IsNormalizerEnabledResponse\022" + - "S\n\016RunCatalogScan\022\037.hbase.pb.RunCatalogS" + - "canRequest\032 .hbase.pb.RunCatalogScanResp" + - "onse\022e\n\024EnableCatalogJanitor\022%.hbase.pb." 
+ - "EnableCatalogJanitorRequest\032&.hbase.pb.E", - "nableCatalogJanitorResponse\022n\n\027IsCatalog" + - "JanitorEnabled\022(.hbase.pb.IsCatalogJanit" + - "orEnabledRequest\032).hbase.pb.IsCatalogJan" + - "itorEnabledResponse\022V\n\017RunCleanerChore\022 " + - ".hbase.pb.RunCleanerChoreRequest\032!.hbase" + - ".pb.RunCleanerChoreResponse\022k\n\026SetCleane" + - "rChoreRunning\022\'.hbase.pb.SetCleanerChore" + - "RunningRequest\032(.hbase.pb.SetCleanerChor" + - "eRunningResponse\022h\n\025IsCleanerChoreEnable" + - "d\022&.hbase.pb.IsCleanerChoreEnabledReques", - "t\032\'.hbase.pb.IsCleanerChoreEnabledRespon" + - "se\022^\n\021ExecMasterService\022#.hbase.pb.Copro" + - "cessorServiceRequest\032$.hbase.pb.Coproces" + - "sorServiceResponse\022A\n\010Snapshot\022\031.hbase.p" + - "b.SnapshotRequest\032\032.hbase.pb.SnapshotRes" + - "ponse\022h\n\025GetCompletedSnapshots\022&.hbase.p" + - "b.GetCompletedSnapshotsRequest\032\'.hbase.p" + - "b.GetCompletedSnapshotsResponse\022S\n\016Delet" + - "eSnapshot\022\037.hbase.pb.DeleteSnapshotReque" + - "st\032 .hbase.pb.DeleteSnapshotResponse\022S\n\016", - "IsSnapshotDone\022\037.hbase.pb.IsSnapshotDone" + - "Request\032 .hbase.pb.IsSnapshotDoneRespons" + - "e\022V\n\017RestoreSnapshot\022 .hbase.pb.RestoreS" + - "napshotRequest\032!.hbase.pb.RestoreSnapsho" + - "tResponse\022P\n\rExecProcedure\022\036.hbase.pb.Ex" + - "ecProcedureRequest\032\037.hbase.pb.ExecProced" + - "ureResponse\022W\n\024ExecProcedureWithRet\022\036.hb" + - "ase.pb.ExecProcedureRequest\032\037.hbase.pb.E" + - "xecProcedureResponse\022V\n\017IsProcedureDone\022" + - " .hbase.pb.IsProcedureDoneRequest\032!.hbas", - "e.pb.IsProcedureDoneResponse\022V\n\017ModifyNa" + - "mespace\022 .hbase.pb.ModifyNamespaceReques" + - "t\032!.hbase.pb.ModifyNamespaceResponse\022V\n\017" + - "CreateNamespace\022 .hbase.pb.CreateNamespa" + - "ceRequest\032!.hbase.pb.CreateNamespaceResp" + - "onse\022V\n\017DeleteNamespace\022 .hbase.pb.Delet" + - "eNamespaceRequest\032!.hbase.pb.DeleteNames" + - "paceResponse\022k\n\026GetNamespaceDescriptor\022\'" + - ".hbase.pb.GetNamespaceDescriptorRequest\032" + - "(.hbase.pb.GetNamespaceDescriptorRespons", - "e\022q\n\030ListNamespaceDescriptors\022).hbase.pb" + - ".ListNamespaceDescriptorsRequest\032*.hbase" + - ".pb.ListNamespaceDescriptorsResponse\022\206\001\n" + - "\037ListTableDescriptorsByNamespace\0220.hbase" + - ".pb.ListTableDescriptorsByNamespaceReque" + - "st\0321.hbase.pb.ListTableDescriptorsByName" + - "spaceResponse\022t\n\031ListTableNamesByNamespa" + - "ce\022*.hbase.pb.ListTableNamesByNamespaceR" + - "equest\032+.hbase.pb.ListTableNamesByNamesp" + - "aceResponse\022P\n\rGetTableState\022\036.hbase.pb.", - "GetTableStateRequest\032\037.hbase.pb.GetTable" + - "StateResponse\022A\n\010SetQuota\022\031.hbase.pb.Set" + - "QuotaRequest\032\032.hbase.pb.SetQuotaResponse" + - "\022x\n\037getLastMajorCompactionTimestamp\022).hb" + - "ase.pb.MajorCompactionTimestampRequest\032*" + - ".hbase.pb.MajorCompactionTimestampRespon" + - "se\022\212\001\n(getLastMajorCompactionTimestampFo" + - "rRegion\0222.hbase.pb.MajorCompactionTimest" + - "ampForRegionRequest\032*.hbase.pb.MajorComp" + - "actionTimestampResponse\022_\n\022getProcedureR", - "esult\022#.hbase.pb.GetProcedureResultReque" + - "st\032$.hbase.pb.GetProcedureResultResponse" + - "\022h\n\027getSecurityCapabilities\022%.hbase.pb.S" + - "ecurityCapabilitiesRequest\032&.hbase.pb.Se" + - "curityCapabilitiesResponse\022S\n\016AbortProce" + - "dure\022\037.hbase.pb.AbortProcedureRequest\032 ." 
+ - "hbase.pb.AbortProcedureResponse\022S\n\016ListP" + - "rocedures\022\037.hbase.pb.ListProceduresReque" + - "st\032 .hbase.pb.ListProceduresResponse\022_\n\022" + - "AddReplicationPeer\022#.hbase.pb.AddReplica", - "tionPeerRequest\032$.hbase.pb.AddReplicatio" + - "nPeerResponse\022h\n\025RemoveReplicationPeer\022&" + - ".hbase.pb.RemoveReplicationPeerRequest\032\'" + - ".hbase.pb.RemoveReplicationPeerResponse\022" + - "h\n\025EnableReplicationPeer\022&.hbase.pb.Enab" + - "leReplicationPeerRequest\032\'.hbase.pb.Enab" + - "leReplicationPeerResponse\022k\n\026DisableRepl" + - "icationPeer\022\'.hbase.pb.DisableReplicatio" + - "nPeerRequest\032(.hbase.pb.DisableReplicati" + - "onPeerResponse\022q\n\030GetReplicationPeerConf", - "ig\022).hbase.pb.GetReplicationPeerConfigRe" + - "quest\032*.hbase.pb.GetReplicationPeerConfi" + - "gResponse\022z\n\033UpdateReplicationPeerConfig" + - "\022,.hbase.pb.UpdateReplicationPeerConfigR" + - "equest\032-.hbase.pb.UpdateReplicationPeerC" + - "onfigResponse\022e\n\024ListReplicationPeers\022%." + - "hbase.pb.ListReplicationPeersRequest\032&.h" + - "base.pb.ListReplicationPeersResponse\022t\n\031" + - "listDrainingRegionServers\022*.hbase.pb.Lis" + - "tDrainingRegionServersRequest\032+.hbase.pb", - ".ListDrainingRegionServersResponse\022_\n\022dr" + - "ainRegionServers\022#.hbase.pb.DrainRegionS" + - "erversRequest\032$.hbase.pb.DrainRegionServ" + - "ersResponse\022}\n\034removeDrainFromRegionServ" + - "ers\022-.hbase.pb.RemoveDrainFromRegionServ" + - "ersRequest\032..hbase.pb.RemoveDrainFromReg" + - "ionServersResponseBI\n1org.apache.hadoop." + - "hbase.shaded.protobuf.generatedB\014MasterP" + - "rotosH\001\210\001\001\240\001\001" + "andling.proto\032\nLock.proto\032\017Procedure.pro" + + "to\032\013Quota.proto\032\021Replication.proto\"\234\001\n\020A" + + "ddColumnRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hb" + + "ase.pb.TableName\0225\n\017column_families\030\002 \002(" + + "\0132\034.hbase.pb.ColumnFamilySchema\022\026\n\013nonce" + + "_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"$\n\021Add" + + "ColumnResponse\022\017\n\007proc_id\030\001 \001(\004\"}\n\023Delet" + + "eColumnRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hba", + "se.pb.TableName\022\023\n\013column_name\030\002 \002(\014\022\026\n\013" + + "nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\'" + + "\n\024DeleteColumnResponse\022\017\n\007proc_id\030\001 \001(\004\"" + + "\237\001\n\023ModifyColumnRequest\022\'\n\ntable_name\030\001 " + + "\002(\0132\023.hbase.pb.TableName\0225\n\017column_famil" + + "ies\030\002 \002(\0132\034.hbase.pb.ColumnFamilySchema\022" + + "\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\001" + + "0\"\'\n\024ModifyColumnResponse\022\017\n\007proc_id\030\001 \001" + + "(\004\"n\n\021MoveRegionRequest\022)\n\006region\030\001 \002(\0132" + + "\031.hbase.pb.RegionSpecifier\022.\n\020dest_serve", + "r_name\030\002 \001(\0132\024.hbase.pb.ServerName\"\024\n\022Mo" + + "veRegionResponse\"\210\001\n\030MergeTableRegionsRe" + + "quest\022)\n\006region\030\001 \003(\0132\031.hbase.pb.RegionS" + + "pecifier\022\027\n\010forcible\030\003 \001(\010:\005false\022\026\n\013non" + + "ce_group\030\004 \001(\004:\0010\022\020\n\005nonce\030\005 \001(\004:\0010\",\n\031M" + + "ergeTableRegionsResponse\022\017\n\007proc_id\030\001 \001(" + + "\004\"@\n\023AssignRegionRequest\022)\n\006region\030\001 \002(\013" + + "2\031.hbase.pb.RegionSpecifier\"\026\n\024AssignReg" + + 
"ionResponse\"X\n\025UnassignRegionRequest\022)\n\006" + + "region\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022", + "\024\n\005force\030\002 \001(\010:\005false\"\030\n\026UnassignRegionR" + + "esponse\"A\n\024OfflineRegionRequest\022)\n\006regio" + + "n\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"\027\n\025Of" + + "flineRegionResponse\"\177\n\022CreateTableReques" + + "t\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Table" + + "Schema\022\022\n\nsplit_keys\030\002 \003(\014\022\026\n\013nonce_grou" + + "p\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTa" + + "bleResponse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022DeleteTa" + + "bleRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.p" + + "b.TableName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005n", + "once\030\003 \001(\004:\0010\"&\n\023DeleteTableResponse\022\017\n\007" + + "proc_id\030\001 \001(\004\"\207\001\n\024TruncateTableRequest\022&" + + "\n\ttableName\030\001 \002(\0132\023.hbase.pb.TableName\022\035" + + "\n\016preserveSplits\030\002 \001(\010:\005false\022\026\n\013nonce_g" + + "roup\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"(\n\025Trunc" + + "ateTableResponse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022Ena" + + "bleTableRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hb" + + "ase.pb.TableName\022\026\n\013nonce_group\030\002 \001(\004:\0010" + + "\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023EnableTableRespons" + + "e\022\017\n\007proc_id\030\001 \001(\004\"h\n\023DisableTableReques", + "t\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableNa" + + "me\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(" + + "\004:\0010\"\'\n\024DisableTableResponse\022\017\n\007proc_id\030" + + "\001 \001(\004\"\224\001\n\022ModifyTableRequest\022\'\n\ntable_na" + + "me\030\001 \002(\0132\023.hbase.pb.TableName\022+\n\014table_s" + + "chema\030\002 \002(\0132\025.hbase.pb.TableSchema\022\026\n\013no" + + "nce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023" + + "ModifyTableResponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\026" + + "CreateNamespaceRequest\022:\n\023namespaceDescr" + + "iptor\030\001 \002(\0132\035.hbase.pb.NamespaceDescript", + "or\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(" + + "\004:\0010\"*\n\027CreateNamespaceResponse\022\017\n\007proc_" + + "id\030\001 \001(\004\"Y\n\026DeleteNamespaceRequest\022\025\n\rna" + + "mespaceName\030\001 \002(\t\022\026\n\013nonce_group\030\002 \001(\004:\001" + + "0\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n\027DeleteNamespaceRe" + + "sponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\026ModifyNamespa" + + "ceRequest\022:\n\023namespaceDescriptor\030\001 \002(\0132\035" + + ".hbase.pb.NamespaceDescriptor\022\026\n\013nonce_g" + + "roup\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n\027Modif" + + "yNamespaceResponse\022\017\n\007proc_id\030\001 \001(\004\"6\n\035G", + "etNamespaceDescriptorRequest\022\025\n\rnamespac" + + "eName\030\001 \002(\t\"\\\n\036GetNamespaceDescriptorRes" + + "ponse\022:\n\023namespaceDescriptor\030\001 \002(\0132\035.hba" + + "se.pb.NamespaceDescriptor\"!\n\037ListNamespa" + + "ceDescriptorsRequest\"^\n ListNamespaceDes" + + "criptorsResponse\022:\n\023namespaceDescriptor\030" + + "\001 \003(\0132\035.hbase.pb.NamespaceDescriptor\"?\n&" + + "ListTableDescriptorsByNamespaceRequest\022\025" + + 
"\n\rnamespaceName\030\001 \002(\t\"U\n\'ListTableDescri" + + "ptorsByNamespaceResponse\022*\n\013tableSchema\030", + "\001 \003(\0132\025.hbase.pb.TableSchema\"9\n ListTabl" + + "eNamesByNamespaceRequest\022\025\n\rnamespaceNam" + + "e\030\001 \002(\t\"K\n!ListTableNamesByNamespaceResp" + + "onse\022&\n\ttableName\030\001 \003(\0132\023.hbase.pb.Table" + + "Name\"\021\n\017ShutdownRequest\"\022\n\020ShutdownRespo" + + "nse\"\023\n\021StopMasterRequest\"\024\n\022StopMasterRe" + + "sponse\"\034\n\032IsInMaintenanceModeRequest\"8\n\033" + + "IsInMaintenanceModeResponse\022\031\n\021inMainten" + + "anceMode\030\001 \002(\010\"\037\n\016BalanceRequest\022\r\n\005forc" + + "e\030\001 \001(\010\"\'\n\017BalanceResponse\022\024\n\014balancer_r", + "an\030\001 \002(\010\"<\n\031SetBalancerRunningRequest\022\n\n" + + "\002on\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n\032SetBal" + + "ancerRunningResponse\022\032\n\022prev_balance_val" + + "ue\030\001 \001(\010\"\032\n\030IsBalancerEnabledRequest\",\n\031" + + "IsBalancerEnabledResponse\022\017\n\007enabled\030\001 \002" + + "(\010\"w\n\035SetSplitOrMergeEnabledRequest\022\017\n\007e" + + "nabled\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\0220\n\014swi" + + "tch_types\030\003 \003(\0162\032.hbase.pb.MasterSwitchT" + + "ype\"4\n\036SetSplitOrMergeEnabledResponse\022\022\n" + + "\nprev_value\030\001 \003(\010\"O\n\034IsSplitOrMergeEnabl", + "edRequest\022/\n\013switch_type\030\001 \002(\0162\032.hbase.p" + + "b.MasterSwitchType\"0\n\035IsSplitOrMergeEnab" + + "ledResponse\022\017\n\007enabled\030\001 \002(\010\"\022\n\020Normaliz" + + "eRequest\"+\n\021NormalizeResponse\022\026\n\016normali" + + "zer_ran\030\001 \002(\010\")\n\033SetNormalizerRunningReq" + + "uest\022\n\n\002on\030\001 \002(\010\"=\n\034SetNormalizerRunning" + + "Response\022\035\n\025prev_normalizer_value\030\001 \001(\010\"" + + "\034\n\032IsNormalizerEnabledRequest\".\n\033IsNorma" + + "lizerEnabledResponse\022\017\n\007enabled\030\001 \002(\010\"\027\n" + + "\025RunCatalogScanRequest\"-\n\026RunCatalogScan", + "Response\022\023\n\013scan_result\030\001 \001(\005\"-\n\033EnableC" + + "atalogJanitorRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034" + + "EnableCatalogJanitorResponse\022\022\n\nprev_val" + + "ue\030\001 \001(\010\" \n\036IsCatalogJanitorEnabledReque" + + "st\"0\n\037IsCatalogJanitorEnabledResponse\022\r\n" + + "\005value\030\001 \002(\010\"\030\n\026RunCleanerChoreRequest\"4" + + "\n\027RunCleanerChoreResponse\022\031\n\021cleaner_cho" + + "re_ran\030\001 \002(\010\"+\n\035SetCleanerChoreRunningRe" + + "quest\022\n\n\002on\030\001 \002(\010\"4\n\036SetCleanerChoreRunn" + + "ingResponse\022\022\n\nprev_value\030\001 \001(\010\"\036\n\034IsCle", + "anerChoreEnabledRequest\".\n\035IsCleanerChor" + + "eEnabledResponse\022\r\n\005value\030\001 \002(\010\"B\n\017Snaps" + + "hotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb." 
+ + "SnapshotDescription\",\n\020SnapshotResponse\022" + + "\030\n\020expected_timeout\030\001 \002(\003\"\036\n\034GetComplete" + + "dSnapshotsRequest\"Q\n\035GetCompletedSnapsho" + + "tsResponse\0220\n\tsnapshots\030\001 \003(\0132\035.hbase.pb" + + ".SnapshotDescription\"H\n\025DeleteSnapshotRe" + + "quest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Snaps" + + "hotDescription\"\030\n\026DeleteSnapshotResponse", + "\"s\n\026RestoreSnapshotRequest\022/\n\010snapshot\030\001" + + " \002(\0132\035.hbase.pb.SnapshotDescription\022\026\n\013n" + + "once_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"*\n" + + "\027RestoreSnapshotResponse\022\017\n\007proc_id\030\001 \002(" + + "\004\"H\n\025IsSnapshotDoneRequest\022/\n\010snapshot\030\001" + + " \001(\0132\035.hbase.pb.SnapshotDescription\"^\n\026I" + + "sSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fal" + + "se\022/\n\010snapshot\030\002 \001(\0132\035.hbase.pb.Snapshot" + + "Description\"O\n\034IsRestoreSnapshotDoneRequ" + + "est\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb.Snapsho", + "tDescription\"4\n\035IsRestoreSnapshotDoneRes" + + "ponse\022\023\n\004done\030\001 \001(\010:\005false\"F\n\033GetSchemaA" + + "lterStatusRequest\022\'\n\ntable_name\030\001 \002(\0132\023." + + "hbase.pb.TableName\"T\n\034GetSchemaAlterStat" + + "usResponse\022\035\n\025yet_to_update_regions\030\001 \001(" + + "\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n\032GetTableDes" + + "criptorsRequest\022(\n\013table_names\030\001 \003(\0132\023.h" + + "base.pb.TableName\022\r\n\005regex\030\002 \001(\t\022!\n\022incl" + + "ude_sys_tables\030\003 \001(\010:\005false\022\021\n\tnamespace" + + "\030\004 \001(\t\"J\n\033GetTableDescriptorsResponse\022+\n", + "\014table_schema\030\001 \003(\0132\025.hbase.pb.TableSche" + + "ma\"[\n\024GetTableNamesRequest\022\r\n\005regex\030\001 \001(" + + "\t\022!\n\022include_sys_tables\030\002 \001(\010:\005false\022\021\n\t" + + "namespace\030\003 \001(\t\"A\n\025GetTableNamesResponse" + + "\022(\n\013table_names\030\001 \003(\0132\023.hbase.pb.TableNa" + + "me\"?\n\024GetTableStateRequest\022\'\n\ntable_name" + + "\030\001 \002(\0132\023.hbase.pb.TableName\"B\n\025GetTableS" + + "tateResponse\022)\n\013table_state\030\001 \002(\0132\024.hbas" + + "e.pb.TableState\"\031\n\027GetClusterStatusReque" + + "st\"K\n\030GetClusterStatusResponse\022/\n\016cluste", + "r_status\030\001 \002(\0132\027.hbase.pb.ClusterStatus\"" + + "\030\n\026IsMasterRunningRequest\"4\n\027IsMasterRun" + + "ningResponse\022\031\n\021is_master_running\030\001 \002(\010\"" + + "I\n\024ExecProcedureRequest\0221\n\tprocedure\030\001 \002" + + "(\0132\036.hbase.pb.ProcedureDescription\"F\n\025Ex" + + "ecProcedureResponse\022\030\n\020expected_timeout\030" + + "\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsProcedur" + + "eDoneRequest\0221\n\tprocedure\030\001 \001(\0132\036.hbase." 
+ + "pb.ProcedureDescription\"`\n\027IsProcedureDo" + + "neResponse\022\023\n\004done\030\001 \001(\010:\005false\0220\n\010snaps", + "hot\030\002 \001(\0132\036.hbase.pb.ProcedureDescriptio" + + "n\",\n\031GetProcedureResultRequest\022\017\n\007proc_i" + + "d\030\001 \002(\004\"\371\001\n\032GetProcedureResultResponse\0229" + + "\n\005state\030\001 \002(\0162*.hbase.pb.GetProcedureRes" + + "ultResponse.State\022\022\n\nstart_time\030\002 \001(\004\022\023\n" + + "\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(\014\0224\n\tex" + + "ception\030\005 \001(\0132!.hbase.pb.ForeignExceptio" + + "nMessage\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUNN" + + "ING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProcedureReq" + + "uest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInterruptIfR", + "unning\030\002 \001(\010:\004true\"6\n\026AbortProcedureResp" + + "onse\022\034\n\024is_procedure_aborted\030\001 \002(\010\"\027\n\025Li" + + "stProceduresRequest\"@\n\026ListProceduresRes" + + "ponse\022&\n\tprocedure\030\001 \003(\0132\023.hbase.pb.Proc" + + "edure\"\022\n\020ListLocksRequest\"1\n\021ListLocksRe" + + "sponse\022\034\n\004lock\030\001 \003(\0132\016.hbase.pb.Lock\"\315\001\n" + + "\017SetQuotaRequest\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nu" + + "ser_group\030\002 \001(\t\022\021\n\tnamespace\030\003 \001(\t\022\'\n\nta" + + "ble_name\030\004 \001(\0132\023.hbase.pb.TableName\022\022\n\nr" + + "emove_all\030\005 \001(\010\022\026\n\016bypass_globals\030\006 \001(\010\022", + "+\n\010throttle\030\007 \001(\0132\031.hbase.pb.ThrottleReq" + + "uest\"\022\n\020SetQuotaResponse\"J\n\037MajorCompact" + + "ionTimestampRequest\022\'\n\ntable_name\030\001 \002(\0132" + + "\023.hbase.pb.TableName\"U\n(MajorCompactionT" + + "imestampForRegionRequest\022)\n\006region\030\001 \002(\013" + + "2\031.hbase.pb.RegionSpecifier\"@\n MajorComp" + + "actionTimestampResponse\022\034\n\024compaction_ti" + + "mestamp\030\001 \002(\003\"\035\n\033SecurityCapabilitiesReq" + + "uest\"\354\001\n\034SecurityCapabilitiesResponse\022G\n" + + "\014capabilities\030\001 \003(\01621.hbase.pb.SecurityC", + "apabilitiesResponse.Capability\"\202\001\n\nCapab" + + "ility\022\031\n\025SIMPLE_AUTHENTICATION\020\000\022\031\n\025SECU" + + "RE_AUTHENTICATION\020\001\022\021\n\rAUTHORIZATION\020\002\022\026" + + "\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CELL_VISIBILIT" + + "Y\020\004\"\"\n ListDrainingRegionServersRequest\"" + + "N\n!ListDrainingRegionServersResponse\022)\n\013" + + "server_name\030\001 \003(\0132\024.hbase.pb.ServerName\"" + + "F\n\031DrainRegionServersRequest\022)\n\013server_n" + + "ame\030\001 \003(\0132\024.hbase.pb.ServerName\"\034\n\032Drain" + + "RegionServersResponse\"P\n#RemoveDrainFrom", + "RegionServersRequest\022)\n\013server_name\030\001 \003(" + + "\0132\024.hbase.pb.ServerName\"&\n$RemoveDrainFr" + + "omRegionServersResponse*(\n\020MasterSwitchT" + + "ype\022\t\n\005SPLIT\020\000\022\t\n\005MERGE\020\0012\2074\n\rMasterServ" + + "ice\022e\n\024GetSchemaAlterStatus\022%.hbase.pb.G" + + "etSchemaAlterStatusRequest\032&.hbase.pb.Ge" + + "tSchemaAlterStatusResponse\022b\n\023GetTableDe" + + "scriptors\022$.hbase.pb.GetTableDescriptors" + + "Request\032%.hbase.pb.GetTableDescriptorsRe" + + "sponse\022P\n\rGetTableNames\022\036.hbase.pb.GetTa", + "bleNamesRequest\032\037.hbase.pb.GetTableNames" + + "Response\022Y\n\020GetClusterStatus\022!.hbase.pb." 
+ + "GetClusterStatusRequest\032\".hbase.pb.GetCl" + + "usterStatusResponse\022V\n\017IsMasterRunning\022 " + + ".hbase.pb.IsMasterRunningRequest\032!.hbase" + + ".pb.IsMasterRunningResponse\022D\n\tAddColumn" + + "\022\032.hbase.pb.AddColumnRequest\032\033.hbase.pb." + + "AddColumnResponse\022M\n\014DeleteColumn\022\035.hbas" + + "e.pb.DeleteColumnRequest\032\036.hbase.pb.Dele" + + "teColumnResponse\022M\n\014ModifyColumn\022\035.hbase", + ".pb.ModifyColumnRequest\032\036.hbase.pb.Modif" + + "yColumnResponse\022G\n\nMoveRegion\022\033.hbase.pb" + + ".MoveRegionRequest\032\034.hbase.pb.MoveRegion" + + "Response\022\\\n\021MergeTableRegions\022\".hbase.pb" + + ".MergeTableRegionsRequest\032#.hbase.pb.Mer" + + "geTableRegionsResponse\022M\n\014AssignRegion\022\035" + + ".hbase.pb.AssignRegionRequest\032\036.hbase.pb" + + ".AssignRegionResponse\022S\n\016UnassignRegion\022" + + "\037.hbase.pb.UnassignRegionRequest\032 .hbase" + + ".pb.UnassignRegionResponse\022P\n\rOfflineReg", + "ion\022\036.hbase.pb.OfflineRegionRequest\032\037.hb" + + "ase.pb.OfflineRegionResponse\022J\n\013DeleteTa" + + "ble\022\034.hbase.pb.DeleteTableRequest\032\035.hbas" + + "e.pb.DeleteTableResponse\022P\n\rtruncateTabl" + + "e\022\036.hbase.pb.TruncateTableRequest\032\037.hbas" + + "e.pb.TruncateTableResponse\022J\n\013EnableTabl" + + "e\022\034.hbase.pb.EnableTableRequest\032\035.hbase." + + "pb.EnableTableResponse\022M\n\014DisableTable\022\035" + + ".hbase.pb.DisableTableRequest\032\036.hbase.pb" + + ".DisableTableResponse\022J\n\013ModifyTable\022\034.h", + "base.pb.ModifyTableRequest\032\035.hbase.pb.Mo" + + "difyTableResponse\022J\n\013CreateTable\022\034.hbase" + + ".pb.CreateTableRequest\032\035.hbase.pb.Create" + + "TableResponse\022A\n\010Shutdown\022\031.hbase.pb.Shu" + + "tdownRequest\032\032.hbase.pb.ShutdownResponse" + + "\022G\n\nStopMaster\022\033.hbase.pb.StopMasterRequ" + + "est\032\034.hbase.pb.StopMasterResponse\022h\n\031IsM" + + "asterInMaintenanceMode\022$.hbase.pb.IsInMa" + + "intenanceModeRequest\032%.hbase.pb.IsInMain" + + "tenanceModeResponse\022>\n\007Balance\022\030.hbase.p", + "b.BalanceRequest\032\031.hbase.pb.BalanceRespo" + + "nse\022_\n\022SetBalancerRunning\022#.hbase.pb.Set" + + "BalancerRunningRequest\032$.hbase.pb.SetBal" + + "ancerRunningResponse\022\\\n\021IsBalancerEnable" + + "d\022\".hbase.pb.IsBalancerEnabledRequest\032#." + + "hbase.pb.IsBalancerEnabledResponse\022k\n\026Se" + + "tSplitOrMergeEnabled\022\'.hbase.pb.SetSplit" + + "OrMergeEnabledRequest\032(.hbase.pb.SetSpli" + + "tOrMergeEnabledResponse\022h\n\025IsSplitOrMerg" + + "eEnabled\022&.hbase.pb.IsSplitOrMergeEnable", + "dRequest\032\'.hbase.pb.IsSplitOrMergeEnable" + + "dResponse\022D\n\tNormalize\022\032.hbase.pb.Normal" + + "izeRequest\032\033.hbase.pb.NormalizeResponse\022" + + "e\n\024SetNormalizerRunning\022%.hbase.pb.SetNo" + + "rmalizerRunningRequest\032&.hbase.pb.SetNor" + + "malizerRunningResponse\022b\n\023IsNormalizerEn" + + "abled\022$.hbase.pb.IsNormalizerEnabledRequ" + + "est\032%.hbase.pb.IsNormalizerEnabledRespon" + + "se\022S\n\016RunCatalogScan\022\037.hbase.pb.RunCatal" + + "ogScanRequest\032 .hbase.pb.RunCatalogScanR", + "esponse\022e\n\024EnableCatalogJanitor\022%.hbase." 
+ + "pb.EnableCatalogJanitorRequest\032&.hbase.p" + + "b.EnableCatalogJanitorResponse\022n\n\027IsCata" + + "logJanitorEnabled\022(.hbase.pb.IsCatalogJa" + + "nitorEnabledRequest\032).hbase.pb.IsCatalog" + + "JanitorEnabledResponse\022V\n\017RunCleanerChor" + + "e\022 .hbase.pb.RunCleanerChoreRequest\032!.hb" + + "ase.pb.RunCleanerChoreResponse\022k\n\026SetCle" + + "anerChoreRunning\022\'.hbase.pb.SetCleanerCh" + + "oreRunningRequest\032(.hbase.pb.SetCleanerC", + "horeRunningResponse\022h\n\025IsCleanerChoreEna" + + "bled\022&.hbase.pb.IsCleanerChoreEnabledReq" + + "uest\032\'.hbase.pb.IsCleanerChoreEnabledRes" + + "ponse\022^\n\021ExecMasterService\022#.hbase.pb.Co" + + "processorServiceRequest\032$.hbase.pb.Copro" + + "cessorServiceResponse\022A\n\010Snapshot\022\031.hbas" + + "e.pb.SnapshotRequest\032\032.hbase.pb.Snapshot" + + "Response\022h\n\025GetCompletedSnapshots\022&.hbas" + + "e.pb.GetCompletedSnapshotsRequest\032\'.hbas" + + "e.pb.GetCompletedSnapshotsResponse\022S\n\016De", + "leteSnapshot\022\037.hbase.pb.DeleteSnapshotRe" + + "quest\032 .hbase.pb.DeleteSnapshotResponse\022" + + "S\n\016IsSnapshotDone\022\037.hbase.pb.IsSnapshotD" + + "oneRequest\032 .hbase.pb.IsSnapshotDoneResp" + + "onse\022V\n\017RestoreSnapshot\022 .hbase.pb.Resto" + + "reSnapshotRequest\032!.hbase.pb.RestoreSnap" + + "shotResponse\022P\n\rExecProcedure\022\036.hbase.pb" + + ".ExecProcedureRequest\032\037.hbase.pb.ExecPro" + + "cedureResponse\022W\n\024ExecProcedureWithRet\022\036" + + ".hbase.pb.ExecProcedureRequest\032\037.hbase.p", + "b.ExecProcedureResponse\022V\n\017IsProcedureDo" + + "ne\022 .hbase.pb.IsProcedureDoneRequest\032!.h" + + "base.pb.IsProcedureDoneResponse\022V\n\017Modif" + + "yNamespace\022 .hbase.pb.ModifyNamespaceReq" + + "uest\032!.hbase.pb.ModifyNamespaceResponse\022" + + "V\n\017CreateNamespace\022 .hbase.pb.CreateName" + + "spaceRequest\032!.hbase.pb.CreateNamespaceR" + + "esponse\022V\n\017DeleteNamespace\022 .hbase.pb.De" + + "leteNamespaceRequest\032!.hbase.pb.DeleteNa" + + "mespaceResponse\022k\n\026GetNamespaceDescripto", + "r\022\'.hbase.pb.GetNamespaceDescriptorReque" + + "st\032(.hbase.pb.GetNamespaceDescriptorResp" + + "onse\022q\n\030ListNamespaceDescriptors\022).hbase" + + ".pb.ListNamespaceDescriptorsRequest\032*.hb" + + "ase.pb.ListNamespaceDescriptorsResponse\022" + + "\206\001\n\037ListTableDescriptorsByNamespace\0220.hb" + + "ase.pb.ListTableDescriptorsByNamespaceRe" + + "quest\0321.hbase.pb.ListTableDescriptorsByN" + + "amespaceResponse\022t\n\031ListTableNamesByName" + + "space\022*.hbase.pb.ListTableNamesByNamespa", + "ceRequest\032+.hbase.pb.ListTableNamesByNam" + + "espaceResponse\022P\n\rGetTableState\022\036.hbase." + + "pb.GetTableStateRequest\032\037.hbase.pb.GetTa" + + "bleStateResponse\022A\n\010SetQuota\022\031.hbase.pb." 
+ + "SetQuotaRequest\032\032.hbase.pb.SetQuotaRespo" + + "nse\022x\n\037getLastMajorCompactionTimestamp\022)" + + ".hbase.pb.MajorCompactionTimestampReques" + + "t\032*.hbase.pb.MajorCompactionTimestampRes" + + "ponse\022\212\001\n(getLastMajorCompactionTimestam" + + "pForRegion\0222.hbase.pb.MajorCompactionTim", + "estampForRegionRequest\032*.hbase.pb.MajorC" + + "ompactionTimestampResponse\022_\n\022getProcedu" + + "reResult\022#.hbase.pb.GetProcedureResultRe" + + "quest\032$.hbase.pb.GetProcedureResultRespo" + + "nse\022h\n\027getSecurityCapabilities\022%.hbase.p" + + "b.SecurityCapabilitiesRequest\032&.hbase.pb" + + ".SecurityCapabilitiesResponse\022S\n\016AbortPr" + + "ocedure\022\037.hbase.pb.AbortProcedureRequest" + + "\032 .hbase.pb.AbortProcedureResponse\022S\n\016Li" + + "stProcedures\022\037.hbase.pb.ListProceduresRe", + "quest\032 .hbase.pb.ListProceduresResponse\022" + + "D\n\tListLocks\022\032.hbase.pb.ListLocksRequest" + + "\032\033.hbase.pb.ListLocksResponse\022_\n\022AddRepl" + + "icationPeer\022#.hbase.pb.AddReplicationPee" + + "rRequest\032$.hbase.pb.AddReplicationPeerRe" + + "sponse\022h\n\025RemoveReplicationPeer\022&.hbase." + + "pb.RemoveReplicationPeerRequest\032\'.hbase." + + "pb.RemoveReplicationPeerResponse\022h\n\025Enab" + + "leReplicationPeer\022&.hbase.pb.EnableRepli" + + "cationPeerRequest\032\'.hbase.pb.EnableRepli", + "cationPeerResponse\022k\n\026DisableReplication" + + "Peer\022\'.hbase.pb.DisableReplicationPeerRe" + + "quest\032(.hbase.pb.DisableReplicationPeerR" + + "esponse\022q\n\030GetReplicationPeerConfig\022).hb" + + "ase.pb.GetReplicationPeerConfigRequest\032*" + + ".hbase.pb.GetReplicationPeerConfigRespon" + + "se\022z\n\033UpdateReplicationPeerConfig\022,.hbas" + + "e.pb.UpdateReplicationPeerConfigRequest\032" + + "-.hbase.pb.UpdateReplicationPeerConfigRe" + + "sponse\022e\n\024ListReplicationPeers\022%.hbase.p", + "b.ListReplicationPeersRequest\032&.hbase.pb" + + ".ListReplicationPeersResponse\022t\n\031listDra" + + "iningRegionServers\022*.hbase.pb.ListDraini" + + "ngRegionServersRequest\032+.hbase.pb.ListDr" + + "ainingRegionServersResponse\022_\n\022drainRegi" + + "onServers\022#.hbase.pb.DrainRegionServersR" + + "equest\032$.hbase.pb.DrainRegionServersResp" + + "onse\022}\n\034removeDrainFromRegionServers\022-.h" + + "base.pb.RemoveDrainFromRegionServersRequ" + + "est\032..hbase.pb.RemoveDrainFromRegionServ", + "ersResponseBI\n1org.apache.hadoop.hbase.s" + + "haded.protobuf.generatedB\014MasterProtosH\001" + + "\210\001\001\240\001\001" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
             InternalDescriptorAssigner() {
@@ -76853,6 +78065,7 @@ public final class MasterProtos {
           org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor(),
+          org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.getDescriptor(),
@@ -77517,80 +78730,92 @@ public final class MasterProtos {
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_ListProceduresResponse_descriptor,
         new java.lang.String[] { "Procedure", });
-    internal_static_hbase_pb_SetQuotaRequest_descriptor =
+    internal_static_hbase_pb_ListLocksRequest_descriptor =
       getDescriptor().getMessageTypes().get(110);
+    internal_static_hbase_pb_ListLocksRequest_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_hbase_pb_ListLocksRequest_descriptor,
+        new java.lang.String[] { });
+    internal_static_hbase_pb_ListLocksResponse_descriptor =
+      getDescriptor().getMessageTypes().get(111);
+    internal_static_hbase_pb_ListLocksResponse_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_hbase_pb_ListLocksResponse_descriptor,
+        new java.lang.String[] { "Lock", });
+    internal_static_hbase_pb_SetQuotaRequest_descriptor =
+      getDescriptor().getMessageTypes().get(112);
     internal_static_hbase_pb_SetQuotaRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SetQuotaRequest_descriptor,
         new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", });
     internal_static_hbase_pb_SetQuotaResponse_descriptor =
-      getDescriptor().getMessageTypes().get(111);
+      getDescriptor().getMessageTypes().get(113);
     internal_static_hbase_pb_SetQuotaResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SetQuotaResponse_descriptor,
         new java.lang.String[] { });
     internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor =
-      getDescriptor().getMessageTypes().get(112);
+      getDescriptor().getMessageTypes().get(114);
     internal_static_hbase_pb_MajorCompactionTimestampRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_MajorCompactionTimestampRequest_descriptor,
         new java.lang.String[] { "TableName", });
     internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor =
-      getDescriptor().getMessageTypes().get(113);
+      getDescriptor().getMessageTypes().get(115);
     internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_MajorCompactionTimestampForRegionRequest_descriptor,
         new java.lang.String[] { "Region", });
     internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor =
-      getDescriptor().getMessageTypes().get(114);
+      getDescriptor().getMessageTypes().get(116);
     internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor,
         new java.lang.String[] { "CompactionTimestamp", });
     internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor =
-      getDescriptor().getMessageTypes().get(115);
+      getDescriptor().getMessageTypes().get(117);
     internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor,
         new java.lang.String[] { });
     internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor =
-      getDescriptor().getMessageTypes().get(116);
+      getDescriptor().getMessageTypes().get(118);
     internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor,
         new java.lang.String[] { "Capabilities", });
     internal_static_hbase_pb_ListDrainingRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(117);
+      getDescriptor().getMessageTypes().get(119);
     internal_static_hbase_pb_ListDrainingRegionServersRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_ListDrainingRegionServersRequest_descriptor,
         new java.lang.String[] { });
     internal_static_hbase_pb_ListDrainingRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(118);
+      getDescriptor().getMessageTypes().get(120);
     internal_static_hbase_pb_ListDrainingRegionServersResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_ListDrainingRegionServersResponse_descriptor,
         new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_DrainRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(119);
+      getDescriptor().getMessageTypes().get(121);
     internal_static_hbase_pb_DrainRegionServersRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_DrainRegionServersRequest_descriptor,
         new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_DrainRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(120);
+      getDescriptor().getMessageTypes().get(122);
     internal_static_hbase_pb_DrainRegionServersResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_DrainRegionServersResponse_descriptor,
         new java.lang.String[] { });
     internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_descriptor =
-      getDescriptor().getMessageTypes().get(121);
+      getDescriptor().getMessageTypes().get(123);
     internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_RemoveDrainFromRegionServersRequest_descriptor,
         new java.lang.String[] { "ServerName", });
     internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_descriptor =
-      getDescriptor().getMessageTypes().get(122);
+      getDescriptor().getMessageTypes().get(124);
     internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_RemoveDrainFromRegionServersResponse_descriptor,
@@ -77599,6 +78824,7 @@ public final class MasterProtos {
     org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.getDescriptor();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.LockProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.getDescriptor();
diff --git a/hbase-protocol-shaded/src/main/protobuf/Lock.proto b/hbase-protocol-shaded/src/main/protobuf/Lock.proto
new file mode 100644
index 0000000..28ffc0a
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/protobuf/Lock.proto
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
+option java_outer_classname = "LockProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "LockService.proto";
+
+enum ResourceType {
+  RESOURCE_TYPE_SERVER = 1;
+  RESOURCE_TYPE_NAMESPACE = 2;
+  RESOURCE_TYPE_TABLE = 3;
+  RESOURCE_TYPE_REGION = 4;
+}
+
+message WaitingProcedure {
+  required LockType lock_type = 1;
+  required int64 proc_id = 2;
+}
+
+message Lock {
+  required ResourceType resource_type = 1;
+  optional string resource_name = 2;
+  required LockType lock_type = 3;
+  optional int64 exclusive_lock_owner_proc_id = 4;
+  optional int32 shared_lock_count = 5;
+  repeated WaitingProcedure waitingProcedures = 6;
+}
diff --git a/hbase-protocol-shaded/src/main/protobuf/Master.proto b/hbase-protocol-shaded/src/main/protobuf/Master.proto
index e22695b..9988cdb 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Master.proto
@@ -30,6 +30,7 @@ import "HBase.proto";
 import "Client.proto";
 import "ClusterStatus.proto";
 import "ErrorHandling.proto";
+import "Lock.proto";
 import "Procedure.proto";
 import "Quota.proto";
 import "Replication.proto";
@@ -534,6 +535,13 @@ message ListProceduresResponse {
   repeated Procedure procedure = 1;
 }
 
+message ListLocksRequest {
+}
+
+message ListLocksResponse {
+  repeated Lock lock = 1;
+}
+
 message SetQuotaRequest {
   optional string user_name = 1;
   optional string user_group = 2;
@@ -888,6 +896,9 @@ service MasterService {
   rpc ListProcedures(ListProceduresRequest)
     returns(ListProceduresResponse);
 
+  rpc ListLocks(ListLocksRequest)
+    returns(ListLocksResponse);
+
   /** Add a replication peer */
   rpc AddReplicationPeer(AddReplicationPeerRequest)
     returns(AddReplicationPeerResponse);
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 36d5112..e1a47c5 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -125,7 +125,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager();
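[Reviewer note, not part of the patch: a minimal client-side sketch of how the new MasterService.ListLocks round trip surfaces through the Admin#listLocks() method this patch adds. The connection boilerplate is standard HBase client API; relying on each LockInfo's string form for display is an assumption made for illustration, since the LockInfo accessors are not visible in these hunks.]

// Hedged sketch: dump the master's lock table, one line per hbase.pb.Lock
// message carried in ListLocksResponse (resource type/name, lock type,
// exclusive owner proc id or shared count, and any waiting procedures).
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.LockInfo;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ListLocksExample {
  public static void main(String[] args) throws Exception {
    try (Connection connection =
             ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = connection.getAdmin()) {
      // listLocks() wraps the ListLocks RPC (ListLocksRequest -> ListLocksResponse)
      // and converts each hbase.pb.Lock into a client-side LockInfo.
      for (LockInfo lock : admin.listLocks()) {
        System.out.println(lock);
      }
    }
  }
}

[This is the same data the patch renders on the master's procedures.jsp page and exposes through the new list_locks shell command.]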