diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 2addc01..46b0941 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -1020,7 +1020,7 @@ public class HColumnDescriptor implements WritableComparable * @param cfs * @return An {@link HColumnDescriptor} made from the passed in cfs */ - static HColumnDescriptor convert(final ColumnFamilySchema cfs) { + public static HColumnDescriptor convert(final ColumnFamilySchema cfs) { // Use the empty constructor so we preserve the initial values set on construction for things // like maxVersion. Otherwise, we pick up wrong values on deserialization which makes for // unrelated-looking test failures that are hard to trace back to here. @@ -1035,7 +1035,7 @@ public class HColumnDescriptor implements WritableComparable /** * @return Convert this instance to a the pb column family type */ - ColumnFamilySchema convert() { + public ColumnFamilySchema convert() { ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder(); builder.setName(ByteString.copyFrom(getName())); for (Map.Entry e: this.values.entrySet()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index 9a4bf96..359eaab 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -1251,7 +1251,7 @@ public class HTableDescriptor implements WritableComparable { /** * @return Convert the current {@link HTableDescriptor} into a pb TableSchema instance. */ - TableSchema convert() { + public TableSchema convert() { TableSchema.Builder builder = TableSchema.newBuilder(); builder.setName(ByteString.copyFrom(getName())); for (Map.Entry e: this.values.entrySet()) { @@ -1270,7 +1270,7 @@ public class HTableDescriptor implements WritableComparable { * @param ts A pb TableSchema instance. * @return An {@link HTableDescriptor} made from the passed in pb ts. 
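Widening these `convert` methods to `public` is what lets client and RPC code shuttle descriptors across the pb boundary. A minimal sketch of the round-trip they enable, assuming only the 0.95-era classes this patch touches:

```java
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class DescriptorRoundTrip {
  public static void main(String[] args) {
    // Column family descriptor -> pb ColumnFamilySchema -> descriptor again.
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    ColumnFamilySchema cfs = hcd.convert();                     // instance method, now public
    HColumnDescriptor hcdCopy = HColumnDescriptor.convert(cfs); // static factory, now public

    // Same round-trip at the table level.
    HTableDescriptor htd = new HTableDescriptor("t");
    htd.addFamily(hcdCopy);
    TableSchema ts = htd.convert();
    HTableDescriptor htdCopy = HTableDescriptor.convert(ts);
    System.out.println(htdCopy.equals(htd)); // values survive the pb hop
  }
}
```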
*/ - static HTableDescriptor convert(final TableSchema ts) { + public static HTableDescriptor convert(final TableSchema ts) { List list = ts.getColumnFamiliesList(); HColumnDescriptor [] hcds = new HColumnDescriptor[list.size()]; int index = 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 9bf2396..ca33b25 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -74,6 +74,19 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRespo import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest; @@ -493,8 +506,9 @@ public class HBaseAdmin implements Abortable, Closeable { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.createTable(desc, splitKeys); + public Void call() throws ServiceException { + CreateTableRequest request = RequestConverter.buildCreateTableRequest(desc, splitKeys); + master.createTable(null, request); return null; } }); @@ -525,8 +539,9 @@ public class HBaseAdmin implements Abortable, Closeable { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.deleteTable(tableName); + public Void call() throws ServiceException { + DeleteTableRequest req = RequestConverter.buildDeleteTableRequest(tableName); + master.deleteTable(null,req); return null; } }); @@ -554,19 +569,21 @@ public class HBaseAdmin implements Abortable, Closeable { // HMaster removes the table from its HTableDescriptors if (values == null || values.length == 0) { tableExists = false; - HTableDescriptor[] htds; + GetTableDescriptorsResponse htds; MasterKeepAliveConnection master = connection.getKeepAliveMaster(); try { - htds = master.getHTableDescriptors(); + GetTableDescriptorsRequest req = + RequestConverter.buildGetTableDescriptorsRequest(null); + htds = 
master.getTableDescriptors(null, req); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); } finally { master.close(); } - if (htds != null && htds.length > 0) { - for (HTableDescriptor htd: htds) { - if (Bytes.equals(tableName, htd.getName())) { - tableExists = true; - break; - } + for (TableSchema ts : htds.getTableSchemaList()) { + if (Bytes.equals(tableName, ts.getName().toByteArray())) { + tableExists = true; + break; } } if (!tableExists) { @@ -709,9 +726,10 @@ public class HBaseAdmin implements Abortable, Closeable { throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { + public Void call() throws ServiceException { LOG.info("Started enable of " + Bytes.toString(tableName)); - master.enableTable(tableName); + EnableTableRequest req = RequestConverter.buildEnableTableRequest(tableName); + master.enableTable(null,req); return null; } }); @@ -778,9 +796,10 @@ public class HBaseAdmin implements Abortable, Closeable { public void disableTableAsync(final byte [] tableName) throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { + public Void call() throws ServiceException { LOG.info("Started disable of " + Bytes.toString(tableName)); - master.disableTable(tableName); + DisableTableRequest req = RequestConverter.buildDisableTableRequest(tableName); + master.disableTable(null,req); return null; } }); @@ -948,8 +967,14 @@ public class HBaseAdmin implements Abortable, Closeable { HTableDescriptor.isLegalTableName(tableName); return execute(new MasterCallable>() { @Override - public Pair call() throws IOException { - return master.getAlterStatus(tableName); + public Pair call() throws ServiceException { + GetSchemaAlterStatusRequest req = + RequestConverter.buildGetSchemaAlterStatusRequest(tableName); + GetSchemaAlterStatusResponse ret = master.getSchemaAlterStatus(null,req); + Pair pair = + new Pair( + new Integer(ret.getYetToUpdateRegions()),new Integer(ret.getTotalRegions())); + return pair; } }); } @@ -979,8 +1004,9 @@ public class HBaseAdmin implements Abortable, Closeable { throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.addColumn(tableName, column); + public Void call() throws ServiceException { + AddColumnRequest req = RequestConverter.buildAddColumnRequest(tableName, column); + master.addColumn(null,req); return null; } }); @@ -1011,8 +1037,9 @@ public class HBaseAdmin implements Abortable, Closeable { throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.deleteColumn(tableName, columnName); + public Void call() throws ServiceException { + DeleteColumnRequest req = RequestConverter.buildDeleteColumnRequest(tableName, columnName); + master.deleteColumn(null,req); return null; } }); @@ -1045,8 +1072,9 @@ public class HBaseAdmin implements Abortable, Closeable { throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.modifyColumn(tableName, descriptor); + public Void call() throws ServiceException { + ModifyColumnRequest req = RequestConverter.buildModifyColumnRequest(tableName, descriptor); + master.modifyColumn(null,req); return null; } }); @@ -1426,6 +1454,21 @@ public class HBaseAdmin implements Abortable, Closeable { } /** + * Special method, only used by hbck. 
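Every converted admin call in this file now follows the same recipe: build the pb request with RequestConverter, pass a null RpcController, and translate ServiceException back to IOException, either inside execute(...) or explicitly, as the hbck-only offline() method that follows does. A sketch of the explicit shape (a fragment inside HBaseAdmin; tableName is a byte[] placeholder):

```java
MasterKeepAliveConnection master = connection.getKeepAliveMaster();
try {
  EnableTableRequest req = RequestConverter.buildEnableTableRequest(tableName);
  master.enableTable(null, req);             // RpcController argument is unused
} catch (ServiceException se) {
  throw ProtobufUtil.getRemoteException(se); // recover the original IOException
} finally {
  master.close();
}
```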
+ */ + public void offline(final byte [] regionName) + throws IOException { + MasterKeepAliveConnection master = connection.getKeepAliveMaster(); + try { + master.offlineRegion(null,RequestConverter.buildOfflineRegionRequest(regionName)); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } finally { + master.close(); + } + } + + /** * Turn the load balancer on or off. * @param b If true, enable balancer. If false, disable balancer. * @return Previous balancer value @@ -1565,8 +1608,9 @@ public class HBaseAdmin implements Abortable, Closeable { throws IOException { execute(new MasterCallable() { @Override - public Void call() throws IOException { - master.modifyTable(tableName, htd); + public Void call() throws ServiceException { + ModifyTableRequest request = RequestConverter.buildModifyTableRequest(tableName, htd); + master.modifyTable(null, request); return null; } }); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java index a72e72d..259be08 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java @@ -79,6 +79,9 @@ import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.VersionedProtocol; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Addressing; @@ -1586,10 +1589,6 @@ public class HConnectionManager { if (cause instanceof UndeclaredThrowableException) { cause = cause.getCause(); } - if (cause instanceof ServiceException) { - ServiceException se = (ServiceException)cause; - cause = ProtobufUtil.getRemoteException(se); - } throw cause; } } @@ -2245,7 +2244,11 @@ public class HConnectionManager { public HTableDescriptor[] listTables() throws IOException { MasterKeepAliveConnection master = getKeepAliveMaster(); try { - return master.getHTableDescriptors(); + GetTableDescriptorsRequest req = + RequestConverter.buildGetTableDescriptorsRequest(null); + return ProtobufUtil.getHTableDescriptorArray(master.getTableDescriptors(null, req)); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); } finally { master.close(); } @@ -2256,8 +2259,12 @@ public class HConnectionManager { if (tableNames == null || tableNames.isEmpty()) return new HTableDescriptor[0]; MasterKeepAliveConnection master = getKeepAliveMaster(); try { - return master.getHTableDescriptors(tableNames); - }finally { + GetTableDescriptorsRequest req = + RequestConverter.buildGetTableDescriptorsRequest(tableNames); + return ProtobufUtil.getHTableDescriptorArray(master.getTableDescriptors(null, req)); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } finally { master.close(); } } @@ -2280,17 +2287,19 @@ public class HConnectionManager { return HTableDescriptor.META_TABLEDESC; } MasterKeepAliveConnection master = getKeepAliveMaster(); - HTableDescriptor[] htds; + 
GetTableDescriptorsResponse htds; try { - htds = master.getHTableDescriptors(); - }finally { + GetTableDescriptorsRequest req = + RequestConverter.buildGetTableDescriptorsRequest(null); + htds = master.getTableDescriptors(null, req); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } finally { master.close(); } - if (htds != null && htds.length > 0) { - for (HTableDescriptor htd: htds) { - if (Bytes.equals(tableName, htd.getName())) { - return htd; - } + for (TableSchema ts : htds.getTableSchemaList()) { + if (Bytes.equals(tableName, ts.getName().toByteArray())) { + return HTableDescriptor.convert(ts); } } throw new TableNotFoundException(Bytes.toString(tableName)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java index da7451d..0175884 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java @@ -27,10 +27,32 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse; import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; @@ -94,85 +116,106 @@ public interface HMasterInterface extends VersionedProtocol { * Creates a new table asynchronously. If splitKeys are specified, then the * table will be created with an initial set of multiple regions. * If splitKeys is null, the table will be created with a single region. - * @param desc table descriptor - * @param splitKeys - * @throws IOException + * @param controller Unused (set to null). + * @param req CreateTableRequest that contains:
+ * - tableSchema: table descriptor
+ * - splitKeys + * @throws ServiceException */ - public void createTable(HTableDescriptor desc, byte [][] splitKeys) - throws IOException; + public CreateTableResponse createTable(RpcController controller, CreateTableRequest req) + throws ServiceException; /** * Deletes a table - * @param tableName table to delete - * @throws IOException e + * @param controller Unused (set to null). + * @param req DeleteTableRequest that contains:
+ * - tableName: table to delete + * @throws ServiceException */ - public void deleteTable(final byte [] tableName) throws IOException; + public DeleteTableResponse deleteTable(RpcController controller, DeleteTableRequest req) + throws ServiceException; /** * Used by the client to get the number of regions that have received the * updated schema * - * @param tableName - * @return Pair indicating the number of regions updated Pair.getFirst() is the - * regions that are yet to be updated Pair.getSecond() is the total number - * of regions of the table - * @throws IOException + * @param controller Unused (set to null). + * @param req GetSchemaAlterStatusRequest that contains:
+ * - tableName + * @return GetSchemaAlterStatusResponse indicating the number of regions updated. + * yetToUpdateRegions is the regions that are yet to be updated; totalRegions + * is the total number of regions of the table + * @throws ServiceException */ - public Pair<Integer, Integer> getAlterStatus(byte[] tableName) - throws IOException; + public GetSchemaAlterStatusResponse getSchemaAlterStatus( + RpcController controller, GetSchemaAlterStatusRequest req) throws ServiceException; /** * Adds a column to the specified table - * @param tableName table to modify - * @param column column descriptor - * @throws IOException e + * @param controller Unused (set to null). + * @param req AddColumnRequest that contains:
+ * - tableName: table to modify
+ * - column: column descriptor + * @throws ServiceException */ - public void addColumn(final byte [] tableName, HColumnDescriptor column) - throws IOException; + public AddColumnResponse addColumn(RpcController controller, AddColumnRequest req) + throws ServiceException; /** * Modifies an existing column on the specified table - * @param tableName table name - * @param descriptor new column descriptor + * @param controller Unused (set to null). + * @param req ModifyColumnRequest that contains:
+ * - tableName: table name
+ * - descriptor: new column descriptor - * @throws IOException e + * @throws ServiceException */ - public void modifyColumn(final byte [] tableName, HColumnDescriptor descriptor) - throws IOException; + public ModifyColumnResponse modifyColumn(RpcController controller, ModifyColumnRequest req) + throws ServiceException; /** * Deletes a column from the specified table. Table must be disabled. - * @param tableName table to alter - * @param columnName column family to remove - * @throws IOException e + * @param controller Unused (set to null). + * @param req DeleteColumnRequest that contains:
+ * - tableName: table to alter
+ * - columnName: column family to remove + * @throws ServiceException */ - public void deleteColumn(final byte [] tableName, final byte [] columnName) - throws IOException; + public DeleteColumnResponse deleteColumn(RpcController controller, DeleteColumnRequest req) + throws ServiceException; /** * Puts the table on-line (only needed if table has been previously taken offline) - * @param tableName table to enable - * @throws IOException e + * @param controller Unused (set to null). + * @param req EnableTableRequest that contains:
+ * - tableName: table to enable + * @throws ServiceException */ - public void enableTable(final byte [] tableName) throws IOException; + public EnableTableResponse enableTable(RpcController controller, EnableTableRequest req) + throws ServiceException; /** * Take table offline * - * @param tableName table to take offline - * @throws IOException e + * @param controller Unused (set to null). + * @param req DisableTableRequest that contains:
+ * - tableName: table to take offline + * @throws ServiceException */ - public void disableTable(final byte [] tableName) throws IOException; + public DisableTableResponse disableTable(RpcController controller, DisableTableRequest req) + throws ServiceException; /** * Modify a table's metadata * - * @param tableName table to modify - * @param htd new descriptor for table - * @throws IOException e + * @param controller Unused (set to null). + * @param req ModifyTableRequest that contains:
+ * - tableName: table to modify
+ * - tableSchema: new descriptor for table + * @throws ServiceException */ - public void modifyTable(byte[] tableName, HTableDescriptor htd) - throws IOException; + public ModifyTableResponse modifyTable(RpcController controller, ModifyTableRequest req) + throws ServiceException; /** * Shutdown an HBase cluster. @@ -206,11 +249,14 @@ public interface HMasterInterface extends VersionedProtocol { * region should be in a closed state and there will be no attempt to * automatically reassign the region as in unassign. This is a special * method, and should only be used by experts or hbck. - * @param regionName Region to offline. Will clear any existing RegionPlan + * @param controller Unused (set to null). + * @param request OfflineRegionRequest that contains:
+ * - region: Region to offline. Will clear any existing RegionPlan * if one found. - * @throws IOException + * @throws ServiceException */ - public void offline(final byte[] regionName) throws IOException; + public OfflineRegionResponse offlineRegion(RpcController controller, OfflineRegionRequest request) + throws ServiceException; /** * Run the balancer. Will run the balancer and if regions to move, it will @@ -239,17 +285,15 @@ public interface HMasterInterface extends VersionedProtocol { throws ServiceException; /** - * Get array of all HTDs. - * @return array of HTableDescriptor - */ - public HTableDescriptor[] getHTableDescriptors(); - - /** - * Get array of HTDs for requested tables. - * @param tableNames - * @return array of HTableDescriptor + * Get list of TableDescriptors for requested tables. + * @param controller Unused (set to null). + * @param req GetTableDescriptorsRequest that contains:
+ * - tableNames: requested tables, or if empty, all are requested + * @return GetTableDescriptorsResponse + * @throws ServiceException */ - public HTableDescriptor[] getHTableDescriptors(List<String> tableNames); + public GetTableDescriptorsResponse getTableDescriptors( + RpcController controller, GetTableDescriptorsRequest req) throws ServiceException; /** * Assign a region to a server chosen at random. @@ -267,7 +311,7 @@ * back to the same server. Use {@link #moveRegion(RpcController, MoveRegionRequest)} * if you want to control the region movement. * @param controller Unused (set to null). - * @param req The request which contains:
+ * @param req The request that contains:
* - region: Region to unassign. Will clear any existing RegionPlan * if one found.
* - force: If true, force unassign (Will remove region from @@ -281,7 +325,7 @@ public interface HMasterInterface extends VersionedProtocol { /** * Move a region to a specified destination server. * @param controller Unused (set to null). - * @param req The request which contains:
+ * @param req The request that contains:
* - region: The encoded region name; i.e. the hash that makes * up the region name suffix: e.g. if regionname is * TestTable,0094429456,1289497600452.527db22f95c8a9e0116f0cc13c680396., @@ -294,5 +338,5 @@ public interface HMasterInterface extends VersionedProtocol { * region named encodedRegionName */ public MoveRegionResponse moveRegion(RpcController controller, MoveRegionRequest req) - throws ServiceException; + throws ServiceException; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java index 70e9bc1..a0f9155 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java @@ -75,6 +75,7 @@ public class Invocation extends VersionedWritable implements Configurable { PROTOBUF_PROTOCOLS.add(ClientProtocol.class); PROTOBUF_PROTOCOLS.add(AdminProtocol.class); PROTOBUF_PROTOCOLS.add(RegionServerStatusProtocol.class); + PROTOBUF_PROTOCOLS.add(HMasterInterface.class); } private static byte RPC_VERSION = 1; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java index 384387d..2d1676c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java @@ -69,15 +69,6 @@ class WritableRpcEngine implements RpcEngine { // DEBUG log level does NOT emit RPC-level logging. private static final Log LOG = LogFactory.getLog("org.apache.hadoop.ipc.RPCEngine"); - // For protobuf protocols, which use ServiceException, instead of IOException - protected static final Set> - PROTOBUF_PROTOCOLS = new HashSet>(); - - static { - PROTOBUF_PROTOCOLS.add(ClientProtocol.class); - PROTOBUF_PROTOCOLS.add(AdminProtocol.class); - } - /* Cache a client using its socket factory as the hash key */ static private class ClientCache { private Map clients = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index a3af820..93708b5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -123,10 +123,32 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Re import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest; 
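One structural note on the Invocation and WritableRpcEngine hunks above: the PROTOBUF_PROTOCOLS registry now lives only in Invocation and gains HMasterInterface, so the RPC layer treats master calls as protobuf-style (ServiceException-bearing). A reconstruction of the registration as the patch leaves it, assuming Invocation declares the set the same way the removed WritableRpcEngine copy did:

```java
// In Invocation: protocols whose methods use ServiceException instead of IOException.
protected static final Set<Class<? extends VersionedProtocol>> PROTOBUF_PROTOCOLS =
    new HashSet<Class<? extends VersionedProtocol>>();

static {
  PROTOBUF_PROTOCOLS.add(ClientProtocol.class);
  PROTOBUF_PROTOCOLS.add(AdminProtocol.class);
  PROTOBUF_PROTOCOLS.add(RegionServerStatusProtocol.class);
  PROTOBUF_PROTOCOLS.add(HMasterInterface.class); // added by this patch
}
```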
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest; @@ -1005,7 +1027,7 @@ Server { resp.addMapEntries(entry.build()); return resp.build(); - } catch(IOException ioe) { + } catch (IOException ioe) { throw new ServiceException(ioe); } } @@ -1050,7 +1072,7 @@ Server { // Up our metrics. 
this.metrics.incrementRequests(sl.getTotalNumberOfRequests()); } - } catch(IOException ioe) { + } catch (IOException ioe) { throw new ServiceException(ioe); } @@ -1246,7 +1268,7 @@ Server { if (type != RegionSpecifierType.ENCODED_REGION_NAME) { LOG.warn("moveRegion specifier type: expected: " + RegionSpecifierType.ENCODED_REGION_NAME - + " actual: " + RegionSpecifierType.REGION_NAME); + + " actual: " + type); } Pair p = this.assignmentManager.getAssignment(encodedRegionName); @@ -1294,6 +1316,7 @@ Server { return mrr; } + @Override public void createTable(HTableDescriptor hTableDescriptor, byte [][] splitKeys) throws IOException { @@ -1310,10 +1333,23 @@ Server { this.executorService.submit(new CreateTableHandler(this, this.fileSystemManager, this.serverManager, hTableDescriptor, conf, newRegions, catalogTracker, assignmentManager)); - if (cpHost != null) { cpHost.postCreateTable(hTableDescriptor, newRegions); } + + } + + @Override + public CreateTableResponse createTable(RpcController controller, CreateTableRequest req) + throws ServiceException { + HTableDescriptor hTableDescriptor = HTableDescriptor.convert(req.getTableSchema()); + byte [][] splitKeys = ProtobufUtil.getSplitKeysArray(req); + try { + createTable(hTableDescriptor,splitKeys); + } catch (IOException ioe) { + throw new ServiceException(ioe); + } + return CreateTableResponse.newBuilder().build(); } private HRegionInfo[] getHRegionInfos(HTableDescriptor hTableDescriptor, @@ -1343,15 +1379,23 @@ Server { } @Override - public void deleteTable(final byte [] tableName) throws IOException { - checkInitialized(); - if (cpHost != null) { - cpHost.preDeleteTable(tableName); - } - this.executorService.submit(new DeleteTableHandler(tableName, this, this)); - if (cpHost != null) { - cpHost.postDeleteTable(tableName); + public DeleteTableResponse deleteTable(RpcController controller, DeleteTableRequest request) + throws ServiceException { + byte [] tableName = request.getTableName().toByteArray(); + try { + checkInitialized(); + if (cpHost != null) { + cpHost.preDeleteTable(tableName); + } + this.executorService.submit(new DeleteTableHandler(tableName, this, this)); + + if (cpHost != null) { + cpHost.postDeleteTable(tableName); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } + return DeleteTableResponse.newBuilder().build(); } /** @@ -1362,81 +1406,132 @@ Server { * of regions of the table * @throws IOException */ - public Pair getAlterStatus(byte[] tableName) - throws IOException { + @Override + public GetSchemaAlterStatusResponse getSchemaAlterStatus( + RpcController controller, GetSchemaAlterStatusRequest req) throws ServiceException { // TODO: currently, we query using the table name on the client side. this // may overlap with other table operations or the table operation may // have completed before querying this API. We need to refactor to a // transaction system in the future to avoid these ambiguities. 
- return this.assignmentManager.getReopenStatus(tableName); + byte [] tableName = req.getTableName().toByteArray(); + + try { + Pair pair = this.assignmentManager.getReopenStatus(tableName); + GetSchemaAlterStatusResponse.Builder ret = GetSchemaAlterStatusResponse.newBuilder(); + ret.setYetToUpdateRegions(pair.getFirst()); + ret.setTotalRegions(pair.getSecond()); + return ret.build(); + } catch (IOException ioe) { + throw new ServiceException(ioe); + } } - public void addColumn(byte [] tableName, HColumnDescriptor column) - throws IOException { - checkInitialized(); - if (cpHost != null) { - if (cpHost.preAddColumn(tableName, column)) { - return; + public AddColumnResponse addColumn(RpcController controller, AddColumnRequest req) + throws ServiceException { + byte [] tableName = req.getTableName().toByteArray(); + HColumnDescriptor column = HColumnDescriptor.convert(req.getColumnFamilies()); + + try { + checkInitialized(); + if (cpHost != null) { + if (cpHost.preAddColumn(tableName, column)) { + return AddColumnResponse.newBuilder().build(); + } } + new TableAddFamilyHandler(tableName, column, this, this).process(); + if (cpHost != null) { + cpHost.postAddColumn(tableName, column); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } - new TableAddFamilyHandler(tableName, column, this, this).process(); - if (cpHost != null) { - cpHost.postAddColumn(tableName, column); - } + return AddColumnResponse.newBuilder().build(); } - public void modifyColumn(byte [] tableName, HColumnDescriptor descriptor) - throws IOException { - checkInitialized(); - if (cpHost != null) { - if (cpHost.preModifyColumn(tableName, descriptor)) { - return; + public ModifyColumnResponse modifyColumn(RpcController controller, ModifyColumnRequest req) + throws ServiceException { + byte [] tableName = req.getTableName().toByteArray(); + HColumnDescriptor descriptor = HColumnDescriptor.convert(req.getColumnFamilies()); + + try { + checkInitialized(); + if (cpHost != null) { + if (cpHost.preModifyColumn(tableName, descriptor)) { + return ModifyColumnResponse.newBuilder().build(); + } } + new TableModifyFamilyHandler(tableName, descriptor, this, this).process(); + if (cpHost != null) { + cpHost.postModifyColumn(tableName, descriptor); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } - new TableModifyFamilyHandler(tableName, descriptor, this, this).process(); - if (cpHost != null) { - cpHost.postModifyColumn(tableName, descriptor); - } + return ModifyColumnResponse.newBuilder().build(); } - public void deleteColumn(final byte [] tableName, final byte [] c) - throws IOException { - checkInitialized(); - if (cpHost != null) { - if (cpHost.preDeleteColumn(tableName, c)) { - return; + @Override + public DeleteColumnResponse deleteColumn(RpcController controller, DeleteColumnRequest req) + throws ServiceException { + final byte [] tableName = req.getTableName().toByteArray(); + final byte [] columnName = req.getColumnName().toByteArray(); + try { + checkInitialized(); + if (cpHost != null) { + if (cpHost.preDeleteColumn(tableName, columnName)) { + return DeleteColumnResponse.newBuilder().build(); + } } + new TableDeleteFamilyHandler(tableName, columnName, this, this).process(); + if (cpHost != null) { + cpHost.postDeleteColumn(tableName, columnName); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } - new TableDeleteFamilyHandler(tableName, c, this, this).process(); - if (cpHost != null) { - cpHost.postDeleteColumn(tableName, c); - } + return 
DeleteColumnResponse.newBuilder().build(); } - public void enableTable(final byte [] tableName) throws IOException { - checkInitialized(); - if (cpHost != null) { - cpHost.preEnableTable(tableName); - } - this.executorService.submit(new EnableTableHandler(this, tableName, - catalogTracker, assignmentManager, false)); + @Override + public EnableTableResponse enableTable(RpcController controller, EnableTableRequest request) + throws ServiceException { + byte [] tableName = request.getTableName().toByteArray(); + try { + checkInitialized(); + if (cpHost != null) { + cpHost.preEnableTable(tableName); + } + this.executorService.submit(new EnableTableHandler(this, tableName, + catalogTracker, assignmentManager, false)); - if (cpHost != null) { - cpHost.postEnableTable(tableName); + if (cpHost != null) { + cpHost.postEnableTable(tableName); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } + return EnableTableResponse.newBuilder().build(); } - public void disableTable(final byte [] tableName) throws IOException { - checkInitialized(); - if (cpHost != null) { - cpHost.preDisableTable(tableName); - } - this.executorService.submit(new DisableTableHandler(this, tableName, - catalogTracker, assignmentManager, false)); + @Override + public DisableTableResponse disableTable(RpcController controller, DisableTableRequest request) + throws ServiceException { + byte [] tableName = request.getTableName().toByteArray(); + try { + checkInitialized(); + if (cpHost != null) { + cpHost.preDisableTable(tableName); + } + this.executorService.submit(new DisableTableHandler(this, tableName, + catalogTracker, assignmentManager, false)); - if (cpHost != null) { - cpHost.postDisableTable(tableName); + if (cpHost != null) { + cpHost.postDisableTable(tableName); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } + return DisableTableResponse.newBuilder().build(); } /** @@ -1475,19 +1570,26 @@ Server { } @Override - public void modifyTable(final byte[] tableName, HTableDescriptor htd) - throws IOException { - checkInitialized(); - if (cpHost != null) { - cpHost.preModifyTable(tableName, htd); - } - TableEventHandler tblHandle = new ModifyTableHandler(tableName, htd, this, this); - this.executorService.submit(tblHandle); - tblHandle.waitForPersist(); + public ModifyTableResponse modifyTable(RpcController controller, ModifyTableRequest req) + throws ServiceException { + final byte [] tableName = req.getTableName().toByteArray(); + HTableDescriptor htd = HTableDescriptor.convert(req.getTableSchema()); + try { + checkInitialized(); + if (cpHost != null) { + cpHost.preModifyTable(tableName, htd); + } + TableEventHandler tblHandle = new ModifyTableHandler(tableName, htd, this, this); + this.executorService.submit(tblHandle); + tblHandle.waitForPersist(); - if (cpHost != null) { - cpHost.postModifyTable(tableName, htd); + if (cpHost != null) { + cpHost.postModifyTable(tableName, htd); + } + } catch (IOException ioe) { + throw new ServiceException(ioe); } + return ModifyTableResponse.newBuilder().build(); } @Override @@ -1854,7 +1956,7 @@ Server { checkInitialized(); if (type != RegionSpecifierType.REGION_NAME) { LOG.warn("assignRegion specifier type: expected: " + RegionSpecifierType.REGION_NAME - + " actual: " + RegionSpecifierType.ENCODED_REGION_NAME); + + " actual: " + type); } Pair pair = MetaReader.getRegion(this.catalogTracker, regionName); @@ -1891,7 +1993,7 @@ Server { checkInitialized(); if (type != RegionSpecifierType.REGION_NAME) { LOG.warn("unassignRegion specifier type: 
expected: " + RegionSpecifierType.REGION_NAME - + " actual: " + RegionSpecifierType.ENCODED_REGION_NAME); + + " actual: " + type); } Pair pair = MetaReader.getRegion(this.catalogTracker, regionName); @@ -1919,39 +2021,43 @@ Server { } /** - * Get HTD array for given tables - * @param tableNames - * @return HTableDescriptor[] + * Get list of TableDescriptors for requested tables. + * @param controller Unused (set to null). + * @param req GetTableDescriptorsRequest that contains: + * - tableNames: requested tables, or if empty, all are requested + * @return GetTableDescriptorsResponse + * @throws ServiceException */ - public HTableDescriptor[] getHTableDescriptors(List tableNames) { - List list = - new ArrayList(tableNames.size()); - for (String s: tableNames) { - HTableDescriptor htd = null; + public GetTableDescriptorsResponse getTableDescriptors( + RpcController controller, GetTableDescriptorsRequest req) throws ServiceException { + GetTableDescriptorsResponse.Builder builder = GetTableDescriptorsResponse.newBuilder(); + if (req.getTableNamesCount() == 0) { + // request for all TableDescriptors + Map descriptors = null; try { - htd = this.tableDescriptors.get(s); + descriptors = this.tableDescriptors.getAll(); } catch (IOException e) { - LOG.warn("Failed getting descriptor for " + s, e); + LOG.warn("Failed getting all descriptors", e); + } + if (descriptors != null) { + for (HTableDescriptor htd : descriptors.values()) { + builder.addTableSchema(htd.convert()); + } } - if (htd == null) continue; - list.add(htd); } - return list.toArray(new HTableDescriptor [] {}); - } - - /** - * Get all table descriptors - * @return All descriptors or null if none. - */ - public HTableDescriptor [] getHTableDescriptors() { - Map descriptors = null; - try { - descriptors = this.tableDescriptors.getAll(); - } catch (IOException e) { - LOG.warn("Failed getting all descriptors", e); + else { + for (String s: req.getTableNamesList()) { + HTableDescriptor htd = null; + try { + htd = this.tableDescriptors.get(s); + } catch (IOException e) { + LOG.warn("Failed getting descriptor for " + s, e); + } + if (htd == null) continue; + builder.addTableSchema(htd.convert()); + } } - return descriptors == null? - null: descriptors.values().toArray(new HTableDescriptor [] {}); + return builder.build(); } /** @@ -1968,12 +2074,25 @@ Server { * Special method, only used by hbck. 
*/ @Override - public void offline(final byte[] regionName) throws IOException { - Pair pair = - MetaReader.getRegion(this.catalogTracker, regionName); - if (pair == null) throw new UnknownRegionException(Bytes.toStringBinary(regionName)); - HRegionInfo hri = pair.getFirst(); - this.assignmentManager.regionOffline(hri); + public OfflineRegionResponse offlineRegion(RpcController controller, OfflineRegionRequest request) + throws ServiceException { + final byte [] regionName = request.getRegion().getValue().toByteArray(); + RegionSpecifierType type = request.getRegion().getType(); + if (type != RegionSpecifierType.REGION_NAME) { + LOG.warn("moveRegion specifier type: expected: " + RegionSpecifierType.REGION_NAME + + " actual: " + type); + } + + try { + Pair pair = + MetaReader.getRegion(this.catalogTracker, regionName); + if (pair == null) throw new UnknownRegionException(Bytes.toStringBinary(regionName)); + HRegionInfo hri = pair.getFirst(); + this.assignmentManager.regionOffline(hri); + } catch (IOException ioe) { + throw new ServiceException(ioe); + } + return OfflineRegionResponse.newBuilder().build(); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 6c04758..7ac2687 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Action; @@ -104,6 +105,8 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; @@ -290,6 +293,36 @@ public final class ProtobufUtil { } /** + * Get HTableDescriptor[] from GetTableDescriptorsResponse protobuf + * + * @param proto the GetTableDescriptorsResponse + * @return HTableDescriptor[] + */ + public static HTableDescriptor[] getHTableDescriptorArray(GetTableDescriptorsResponse proto) { + if (proto == null) return null; + + HTableDescriptor[] ret = new HTableDescriptor[proto.getTableSchemaCount()]; + for (int i = 0; i < proto.getTableSchemaCount(); ++i) { + ret[i] = HTableDescriptor.convert(proto.getTableSchema(i)); + } + return ret; + } + + /** + * get the split keys in form "byte [][]" from a CreateTableRequest proto + * + * @param proto the CreateTableRequest + * @return the split keys + */ + public static byte [][] getSplitKeysArray(final CreateTableRequest proto) { + byte [][] splitKeys = new byte[proto.getSplitKeysCount()][]; + for (int i = 0; i < proto.getSplitKeysCount(); ++i) { + splitKeys[i] = proto.getSplitKeys(i).toByteArray(); + } + return 
splitKeys; + } + + /** * Convert a protocol buffer Get to a client Get * * @param get the protocol buffer Get to convert diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index fe04023..80ff056 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -26,7 +26,9 @@ import java.util.UUID; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.DeserializationException; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Action; @@ -78,8 +80,19 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest; @@ -857,6 +870,51 @@ public final class RequestConverter { } /** + * Create a protocol buffer AddColumnRequest + * + * @param tableName + * @param column + * @return an AddColumnRequest + */ + public static AddColumnRequest buildAddColumnRequest( + final byte [] tableName, final HColumnDescriptor column) { + AddColumnRequest.Builder builder = AddColumnRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(tableName)); + builder.setColumnFamilies(column.convert()); + return builder.build(); + } + + /** + * Create a protocol buffer DeleteColumnRequest + * + * @param tableName + * @param columnName + * @return a DeleteColumnRequest + */ + public static DeleteColumnRequest buildDeleteColumnRequest( + final byte [] tableName, final byte [] columnName) { + DeleteColumnRequest.Builder builder = DeleteColumnRequest.newBuilder(); + 
builder.setTableName(ByteString.copyFrom(tableName)); + builder.setColumnName(ByteString.copyFrom(columnName)); + return builder.build(); + } + + /** + * Create a protocol buffer ModifyColumnRequest + * + * @param tableName + * @param column + * @return an ModifyColumnRequest + */ + public static ModifyColumnRequest buildModifyColumnRequest( + final byte [] tableName, final HColumnDescriptor column) { + ModifyColumnRequest.Builder builder = ModifyColumnRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(tableName)); + builder.setColumnFamilies(column.convert()); + return builder.build(); + } + + /** * Create a protocol buffer MoveRegionRequest * * @param encodedRegionName @@ -880,7 +938,7 @@ public final class RequestConverter { * Create a protocol buffer AssignRegionRequest * * @param regionName - * @return An AssignRegionRequest + * @return an AssignRegionRequest */ public static AssignRegionRequest buildAssignRegionRequest(final byte [] regionName) { AssignRegionRequest.Builder builder = AssignRegionRequest.newBuilder(); @@ -893,7 +951,7 @@ public final class RequestConverter { * * @param regionName * @param force - * @return An UnassignRegionRequest + * @return an UnassignRegionRequest */ public static UnassignRegionRequest buildUnassignRegionRequest( final byte [] regionName, final boolean force) { @@ -904,6 +962,118 @@ public final class RequestConverter { } /** + * Creates a protocol buffer OfflineRegionRequest + * + * @param regionName + * @return an OfflineRegionRequest + */ + public static OfflineRegionRequest buildOfflineRegionRequest(final byte [] regionName) { + OfflineRegionRequest.Builder builder = OfflineRegionRequest.newBuilder(); + builder.setRegion(buildRegionSpecifier(RegionSpecifierType.REGION_NAME,regionName)); + return builder.build(); + } + + /** + * Creates a protocol buffer DeleteTableRequest + * + * @param tableName + * @return a DeleteTableRequest + */ + public static DeleteTableRequest buildDeleteTableRequest(final byte [] tableName) { + DeleteTableRequest.Builder builder = DeleteTableRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(tableName)); + return builder.build(); + } + + /** + * Creates a protocol buffer EnableTableRequest + * + * @param tableName + * @return an EnableTableRequest + */ + public static EnableTableRequest buildEnableTableRequest(final byte [] tableName) { + EnableTableRequest.Builder builder = EnableTableRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(tableName)); + return builder.build(); + } + + /** + * Creates a protocol buffer DisableTableRequest + * + * @param tableName + * @return a DisableTableRequest + */ + public static DisableTableRequest buildDisableTableRequest(final byte [] tableName) { + DisableTableRequest.Builder builder = DisableTableRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(tableName)); + return builder.build(); + } + + /** + * Creates a protocol buffer CreateTableRequest + * + * @param hTableDesc + * @param splitKeys + * @return a CreateTableRequest + */ + public static CreateTableRequest buildCreateTableRequest( + final HTableDescriptor hTableDesc, final byte [][] splitKeys) { + CreateTableRequest.Builder builder = CreateTableRequest.newBuilder(); + builder.setTableSchema(hTableDesc.convert()); + if (splitKeys != null) { + for (byte [] splitKey : splitKeys) { + builder.addSplitKeys(ByteString.copyFrom(splitKey)); + } + } + return builder.build(); + } + + + /** + * Creates a protocol buffer ModifyTableRequest + * + * @param table + * @param hTableDesc 
+ * @return a ModifyTableRequest + */ + public static ModifyTableRequest buildModifyTableRequest( + final byte [] table, final HTableDescriptor hTableDesc) { + ModifyTableRequest.Builder builder = ModifyTableRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(table)); + builder.setTableSchema(hTableDesc.convert()); + return builder.build(); + } + + /** + * Creates a protocol buffer GetSchemaAlterStatusRequest + * + * @param tableName + * @return a GetSchemaAlterStatusRequest + */ + public static GetSchemaAlterStatusRequest buildGetSchemaAlterStatusRequest(final byte [] table) { + GetSchemaAlterStatusRequest.Builder builder = GetSchemaAlterStatusRequest.newBuilder(); + builder.setTableName(ByteString.copyFrom(table)); + return builder.build(); + } + + /** + * Creates a protocol buffer GetTableDescriptorsRequest + * + * @param tableNames + * @return a GetTableDescriptorsRequest + */ + public static GetTableDescriptorsRequest buildGetTableDescriptorsRequest( + final List tableNames) { + GetTableDescriptorsRequest.Builder builder = GetTableDescriptorsRequest.newBuilder(); + if (tableNames != null) { + for (String str : tableNames) { + builder.addTableNames(str); + } + } + return builder.build(); + } + + /** * Creates a protocol buffer IsMasterRunningRequest * * @return a IsMasterRunningRequest diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index 213b6a5..7a90146 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -8,6 +8,2465 @@ public final class MasterProtos { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + public interface AddColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .ColumnFamilySchema columnFamilies = 2; + boolean hasColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); + } + public static final class AddColumnRequest extends + com.google.protobuf.GeneratedMessage + implements AddColumnRequestOrBuilder { + // Use AddColumnRequest.newBuilder() to construct. 
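For orientation in the generated code that follows: a message like AddColumnRequest is only ever built through its Builder, which is all RequestConverter.buildAddColumnRequest does under the hood. Roughly (a fragment; tableName and column are placeholders):

```java
// Equivalent to RequestConverter.buildAddColumnRequest(tableName, column):
AddColumnRequest req = AddColumnRequest.newBuilder()
    .setTableName(ByteString.copyFrom(tableName))
    .setColumnFamilies(column.convert())   // HColumnDescriptor -> ColumnFamilySchema
    .build();
```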
+ private AddColumnRequest(Builder builder) { + super(builder); + } + private AddColumnRequest(boolean noInit) {} + + private static final AddColumnRequest defaultInstance; + public static AddColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public AddColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .ColumnFamilySchema columnFamilies = 2; + public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + return columnFamilies_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + return columnFamilies_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnFamilies()) { + memoizedIsInitialized = 0; + return false; + } + if (!getColumnFamilies().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, columnFamilies_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, columnFamilies_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean 
equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnFamilies() == other.hasColumnFamilies()); + if (hasColumnFamilies()) { + result = result && getColumnFamilies() + .equals(other.getColumnFamilies()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnFamilies()) { + hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilies().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFamiliesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (columnFamiliesBuilder_ == null) { + result.columnFamilies_ = columnFamilies_; + } else { + result.columnFamilies_ = columnFamiliesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnFamilies()) { + mergeColumnFamilies(other.getColumnFamilies()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnFamilies()) { + + return false; + } + if (!getColumnFamilies().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); + if (hasColumnFamilies()) { + subBuilder.mergeFrom(getColumnFamilies()); + } + input.readMessage(subBuilder, extensionRegistry); + setColumnFamilies(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) 
== 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .ColumnFamilySchema columnFamilies = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_; + } else { + return columnFamiliesBuilder_.getMessage(); + } + } + public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + columnFamilies_ = value; + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setColumnFamilies( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = builderForValue.build(); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { + columnFamilies_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); + } else { + columnFamilies_ = value; + } + onChanged(); + } else { + columnFamiliesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + onChanged(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getColumnFamiliesFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + if (columnFamiliesBuilder_ != null) { + return columnFamiliesBuilder_.getMessageOrBuilder(); + } else { + return 
columnFamilies_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> + getColumnFamiliesFieldBuilder() { + if (columnFamiliesBuilder_ == null) { + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( + columnFamilies_, + getParentForChildren(), + isClean()); + columnFamilies_ = null; + } + return columnFamiliesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:AddColumnRequest) + } + + static { + defaultInstance = new AddColumnRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddColumnRequest) + } + + public interface AddColumnResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class AddColumnResponse extends + com.google.protobuf.GeneratedMessage + implements AddColumnResponseOrBuilder { + // Use AddColumnResponse.newBuilder() to construct. + private AddColumnResponse(Builder builder) { + super(builder); + } + private AddColumnResponse(boolean noInit) {} + + private static final AddColumnResponse defaultInstance; + public static AddColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public AddColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); 
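+      // For reference, a minimal round-trip of the AddColumnRequest message
+      // completed above (a comment-only sketch; "t1"/"cf" are illustrative
+      // values, and convert() is the now-public HColumnDescriptor pb bridge):
+      //
+      //   AddColumnRequest req = AddColumnRequest.newBuilder()
+      //       .setTableName(ByteString.copyFromUtf8("t1"))
+      //       .setColumnFamilies(new HColumnDescriptor("cf").convert())
+      //       .build(); // build() throws if a required field is unset
+      //   AddColumnRequest copy = AddColumnRequest.parseFrom(req.toByteArray());
+      //   assert req.equals(copy); // value equality, as implemented here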
+ return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected 
Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:AddColumnResponse) + } + + static { + defaultInstance = new AddColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddColumnResponse) + } + + public interface DeleteColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required bytes columnName = 2; + boolean hasColumnName(); + com.google.protobuf.ByteString getColumnName(); + } + public static final class DeleteColumnRequest extends + com.google.protobuf.GeneratedMessage + implements DeleteColumnRequestOrBuilder { + // Use DeleteColumnRequest.newBuilder() to construct. + private DeleteColumnRequest(Builder builder) { + super(builder); + } + private DeleteColumnRequest(boolean noInit) {} + + private static final DeleteColumnRequest defaultInstance; + public static DeleteColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public DeleteColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required bytes columnName = 2; + public static final int COLUMNNAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString columnName_; + public boolean hasColumnName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getColumnName() { + return columnName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + 
output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, columnName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, columnName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnName() == other.hasColumnName()); + if (hasColumnName()) { + result = result && getColumnName() + .equals(other.getColumnName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnName()) { + hash = (37 * hash) + COLUMNNAME_FIELD_NUMBER; + hash = (53 * hash) + getColumnName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException 
{ + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + columnName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder 
clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.columnName_ = columnName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnName()) { + setColumnName(other.getColumnName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + 
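+            // The case labels in this generated parser are protobuf wire
+            // tags: tag = (fieldNumber << 3) | wireType, with wireType 2 for
+            // length-delimited fields, so tableName (field 1) parses under
+            // tag 10 and columnName (field 2) under tag 18.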
+            bitField0_ |= 0x00000002;
+            columnName_ = input.readBytes();
+            break;
+          }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // required bytes tableName = 1;
+      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasTableName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public com.google.protobuf.ByteString getTableName() {
+        return tableName_;
+      }
+      public Builder setTableName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        tableName_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearTableName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        tableName_ = getDefaultInstance().getTableName();
+        onChanged();
+        return this;
+      }
+
+      // required bytes columnName = 2;
+      private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY;
+      public boolean hasColumnName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public com.google.protobuf.ByteString getColumnName() {
+        return columnName_;
+      }
+      public Builder setColumnName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        columnName_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearColumnName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        columnName_ = getDefaultInstance().getColumnName();
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:DeleteColumnRequest)
+    }
+
+    static {
+      defaultInstance = new DeleteColumnRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:DeleteColumnRequest)
+  }
+
+  public interface DeleteColumnResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class DeleteColumnResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements DeleteColumnResponseOrBuilder {
+    // Use DeleteColumnResponse.newBuilder() to construct.
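+    // A comment-only sketch of the required-field contract on the
+    // DeleteColumnRequest builder defined above (illustrative values):
+    //
+    //   DeleteColumnRequest.Builder b = DeleteColumnRequest.newBuilder()
+    //       .setTableName(ByteString.copyFromUtf8("t1"));
+    //   b.isInitialized();  // false: required columnName is still unset
+    //   b.buildPartial();   // permitted even while incomplete
+    //   b.setColumnName(ByteString.copyFromUtf8("cf")).build(); // now valid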
+ private DeleteColumnResponse(Builder builder) { + super(builder); + } + private DeleteColumnResponse(boolean noInit) {} + + private static final DeleteColumnResponse defaultInstance; + public static DeleteColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public DeleteColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DeleteColumnResponse) + } + + static { + defaultInstance = new DeleteColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteColumnResponse) + } + + public interface ModifyColumnRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .ColumnFamilySchema columnFamilies = 2; + boolean hasColumnFamilies(); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(); + } + public static final class ModifyColumnRequest extends + com.google.protobuf.GeneratedMessage + implements ModifyColumnRequestOrBuilder { + // Use ModifyColumnRequest.newBuilder() to construct. + private ModifyColumnRequest(Builder builder) { + super(builder); + } + private ModifyColumnRequest(boolean noInit) {} + + private static final ModifyColumnRequest defaultInstance; + public static ModifyColumnRequest getDefaultInstance() { + return defaultInstance; + } + + public ModifyColumnRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .ColumnFamilySchema columnFamilies = 2; + public static final int COLUMNFAMILIES_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + return columnFamilies_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + return columnFamilies_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasColumnFamilies()) { + memoizedIsInitialized = 0; + return false; + } + if (!getColumnFamilies().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, columnFamilies_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(2, columnFamilies_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasColumnFamilies() == other.hasColumnFamilies()); + if (hasColumnFamilies()) { + result = result && getColumnFamilies() + .equals(other.getColumnFamilies()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasColumnFamilies()) { + hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilies().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFamiliesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (columnFamiliesBuilder_ == null) { + result.columnFamilies_ = columnFamilies_; + } else { + result.columnFamilies_ = columnFamiliesBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasColumnFamilies()) { + mergeColumnFamilies(other.getColumnFamilies()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasColumnFamilies()) { + + return false; + } + if (!getColumnFamilies().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(); + if (hasColumnFamilies()) { + subBuilder.mergeFrom(getColumnFamilies()); + } + input.readMessage(subBuilder, extensionRegistry); + setColumnFamilies(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .ColumnFamilySchema columnFamilies = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; + public boolean hasColumnFamilies() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + return columnFamilies_; + } else { + return columnFamiliesBuilder_.getMessage(); + } + } + public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + columnFamilies_ = value; + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setColumnFamilies( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = builderForValue.build(); + onChanged(); + } else { + columnFamiliesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { + if (columnFamiliesBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) { + columnFamilies_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial(); + } else { + columnFamilies_ = value; + } + onChanged(); + } else { + columnFamiliesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearColumnFamilies() { + if (columnFamiliesBuilder_ == null) { + columnFamilies_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); + onChanged(); + } else { + columnFamiliesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getColumnFamiliesFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() { + if (columnFamiliesBuilder_ != null) { + return columnFamiliesBuilder_.getMessageOrBuilder(); + } else { + return columnFamilies_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> + getColumnFamiliesFieldBuilder() { + if (columnFamiliesBuilder_ == null) { + columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( + columnFamilies_, + getParentForChildren(), + isClean()); + columnFamilies_ = null; + } + return columnFamiliesBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ModifyColumnRequest) + } + + static { + defaultInstance = new ModifyColumnRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyColumnRequest) + } + + public interface ModifyColumnResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ModifyColumnResponse extends + com.google.protobuf.GeneratedMessage + implements ModifyColumnResponseOrBuilder { + // Use ModifyColumnResponse.newBuilder() to construct. 
+ private ModifyColumnResponse(Builder builder) { + super(builder); + } + private ModifyColumnResponse(boolean noInit) {} + + private static final ModifyColumnResponse defaultInstance; + public static ModifyColumnResponse getDefaultInstance() { + return defaultInstance; + } + + public ModifyColumnResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ModifyColumnResponse) + } + + static { + defaultInstance = new ModifyColumnResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyColumnResponse) + } + public interface MoveRegionRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -2557,6 +5016,4559 @@ public final class MasterProtos { // @@protoc_insertion_point(class_scope:UnassignRegionResponse) } + public interface OfflineRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean 
hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + public static final class OfflineRegionRequest extends + com.google.protobuf.GeneratedMessage + implements OfflineRegionRequestOrBuilder { + // Use OfflineRegionRequest.newBuilder() to construct. + private OfflineRegionRequest(Builder builder) { + super(builder); + } + private OfflineRegionRequest(boolean noInit) {} + + private static final OfflineRegionRequest defaultInstance; + public static OfflineRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public OfflineRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException 
{ + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest(this); + int 
from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if 
(regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:OfflineRegionRequest) + } + + static { + defaultInstance = new OfflineRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OfflineRegionRequest) + } + + public interface OfflineRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class OfflineRegionResponse extends + com.google.protobuf.GeneratedMessage + implements OfflineRegionResponseOrBuilder { + // Use OfflineRegionResponse.newBuilder() to construct. 
+ private OfflineRegionResponse(Builder builder) { + super(builder); + } + private OfflineRegionResponse(boolean noInit) {} + + private static final OfflineRegionResponse defaultInstance; + public static OfflineRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public OfflineRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder 
clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:OfflineRegionResponse) + } + + static { + defaultInstance = new OfflineRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OfflineRegionResponse) + } + + public interface CreateTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .TableSchema tableSchema = 1; + boolean hasTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder 
getTableSchemaOrBuilder();
+
+    // repeated bytes splitKeys = 2;
+    java.util.List<com.google.protobuf.ByteString> getSplitKeysList();
+    int getSplitKeysCount();
+    com.google.protobuf.ByteString getSplitKeys(int index);
+  }
+  public static final class CreateTableRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements CreateTableRequestOrBuilder {
+    // Use CreateTableRequest.newBuilder() to construct.
+    private CreateTableRequest(Builder builder) {
+      super(builder);
+    }
+    private CreateTableRequest(boolean noInit) {}
+
+    private static final CreateTableRequest defaultInstance;
+    public static CreateTableRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CreateTableRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // required .TableSchema tableSchema = 1;
+    public static final int TABLESCHEMA_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
+    public boolean hasTableSchema() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
+      return tableSchema_;
+    }
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
+      return tableSchema_;
+    }
+
+    // repeated bytes splitKeys = 2;
+    public static final int SPLITKEYS_FIELD_NUMBER = 2;
+    private java.util.List<com.google.protobuf.ByteString> splitKeys_;
+    public java.util.List<com.google.protobuf.ByteString>
+        getSplitKeysList() {
+      return splitKeys_;
+    }
+    public int getSplitKeysCount() {
+      return splitKeys_.size();
+    }
+    public com.google.protobuf.ByteString getSplitKeys(int index) {
+      return splitKeys_.get(index);
+    }
+
+    private void initFields() {
+      tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+      splitKeys_ = java.util.Collections.emptyList();;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasTableSchema()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!getTableSchema().isInitialized()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeMessage(1, tableSchema_);
+      }
+      for (int i = 0; i < splitKeys_.size(); i++) {
+        output.writeBytes(2, splitKeys_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, tableSchema_);
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < splitKeys_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeBytesSizeNoTag(splitKeys_.get(i));
+        }
+        size += dataSize;
+        size += 1 * getSplitKeysList().size();
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) obj;
+
+      boolean result = true;
+      result = result && (hasTableSchema() == other.hasTableSchema());
+      if (hasTableSchema()) {
+        result = result && getTableSchema()
+            .equals(other.getTableSchema());
+      }
+      result = result && getSplitKeysList()
+          .equals(other.getSplitKeysList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasTableSchema()) {
+        hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER;
+        hash = (53 * hash) + getTableSchema().hashCode();
+      }
+      if (getSplitKeysCount() > 0) {
+        hash = (37 * hash) + SPLITKEYS_FIELD_NUMBER;
+        hash = (53 * hash) + getSplitKeysList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+          .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+          .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+          .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+          .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getTableSchemaFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (tableSchemaBuilder_ == null) {
+          tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+        } else {
+          tableSchemaBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        splitKeys_ = java.util.Collections.emptyList();;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+              result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        if (tableSchemaBuilder_ == null) {
+          result.tableSchema_ = tableSchema_;
+        } else {
+          result.tableSchema_ = tableSchemaBuilder_.build();
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_);
+          bitField0_ = (bitField0_ & ~0x00000002);
+        }
+        result.splitKeys_ = splitKeys_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance()) return this;
+        if (other.hasTableSchema()) {
+          mergeTableSchema(other.getTableSchema());
+        }
+        if (!other.splitKeys_.isEmpty()) {
+          if (splitKeys_.isEmpty()) {
+            splitKeys_ = other.splitKeys_;
+            bitField0_ = (bitField0_ & ~0x00000002);
+          } else {
+            ensureSplitKeysIsMutable();
+            splitKeys_.addAll(other.splitKeys_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasTableSchema()) {
+
+          return false;
+        }
+        if (!getTableSchema().isInitialized()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder();
+              if (hasTableSchema()) {
+                subBuilder.mergeFrom(getTableSchema());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setTableSchema(subBuilder.buildPartial());
+              break;
+            }
+            case 18: {
+              ensureSplitKeysIsMutable();
+              splitKeys_.add(input.readBytes());
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // required .TableSchema tableSchema = 1;
+      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
+      public boolean hasTableSchema() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
+        if (tableSchemaBuilder_ == null) {
+          return tableSchema_;
+        } else {
+          return tableSchemaBuilder_.getMessage();
+        }
+      }
+      public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+        if (tableSchemaBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          tableSchema_ = value;
+          onChanged();
+        } else {
+          tableSchemaBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      public Builder setTableSchema(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
+        if (tableSchemaBuilder_ == null) {
+          tableSchema_ = builderForValue.build();
+          onChanged();
+        } else {
+          tableSchemaBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+        if (tableSchemaBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
+            tableSchema_ =
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial();
+          } else {
+            tableSchema_ = value;
+          }
+          onChanged();
+        } else {
+          tableSchemaBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      public Builder clearTableSchema() {
+        if (tableSchemaBuilder_ == null) {
+          tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+          onChanged();
+        } else {
+          tableSchemaBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() {
+        bitField0_ |= 0x00000001;
+        onChanged();
+        return getTableSchemaFieldBuilder().getBuilder();
+      }
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
+        if (tableSchemaBuilder_ != null) {
+          return tableSchemaBuilder_.getMessageOrBuilder();
+        } else {
+          return tableSchema_;
+        }
+      }
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
+          getTableSchemaFieldBuilder() {
+        if (tableSchemaBuilder_ == null) {
+          tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
+                  tableSchema_,
+                  getParentForChildren(),
+                  isClean());
+          tableSchema_ = null;
+        }
+        return tableSchemaBuilder_;
+      }
+
+      // repeated bytes splitKeys = 2;
+      private java.util.List<com.google.protobuf.ByteString> splitKeys_ = java.util.Collections.emptyList();;
+      private void ensureSplitKeysIsMutable() {
+        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+          splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>(splitKeys_);
+          bitField0_ |= 0x00000002;
+        }
+      }
+      public java.util.List<com.google.protobuf.ByteString>
+          getSplitKeysList() {
+        return java.util.Collections.unmodifiableList(splitKeys_);
+      }
+      public int getSplitKeysCount() {
+        return splitKeys_.size();
+      }
+      public com.google.protobuf.ByteString getSplitKeys(int index) {
+        return splitKeys_.get(index);
+      }
+      public Builder setSplitKeys(
+          int index, com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureSplitKeysIsMutable();
+        splitKeys_.set(index, value);
+        onChanged();
+        return this;
+      }
+      public Builder addSplitKeys(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureSplitKeysIsMutable();
+        splitKeys_.add(value);
+        onChanged();
+        return this;
+      }
+      public Builder addAllSplitKeys(
+          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+        ensureSplitKeysIsMutable();
+        super.addAll(values, splitKeys_);
+        onChanged();
+        return this;
+      }
+      public Builder clearSplitKeys() {
+        splitKeys_ = java.util.Collections.emptyList();;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:CreateTableRequest)
+    }
+
+    static {
+      defaultInstance = new CreateTableRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:CreateTableRequest)
+  }
+
+  public interface CreateTableResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class CreateTableResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements CreateTableResponseOrBuilder {
+    // Use CreateTableResponse.newBuilder() to construct.
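A minimal usage sketch of the CreateTableRequest message defined above, not part of the generated file: the caller sets the required tableSchema through the Builder, adds any number of split keys, and build() enforces the required-field check via isInitialized(). The desc and splitKeys variables are hypothetical; in this patch the TableSchema would typically come from an HTableDescriptor's convert().

    TableSchema schema = desc.convert();   // pb schema (assumed source)
    CreateTableRequest.Builder builder = CreateTableRequest.newBuilder();
    builder.setTableSchema(schema);        // required field (tag 1)
    for (byte[] splitKey : splitKeys) {    // optional pre-split points (tag 2)
      builder.addSplitKeys(com.google.protobuf.ByteString.copyFrom(splitKey));
    }
    CreateTableRequest request = builder.build();  // throws if tableSchema unset
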
+ private CreateTableResponse(Builder builder) { + super(builder); + } + private CreateTableResponse(boolean noInit) {} + + private static final CreateTableResponse defaultInstance; + public static CreateTableResponse getDefaultInstance() { + return defaultInstance; + } + + public CreateTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CreateTableResponse) + } + + static { + defaultInstance = new CreateTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CreateTableResponse) + } + + public interface DeleteTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + public static final class DeleteTableRequest extends + com.google.protobuf.GeneratedMessage + implements DeleteTableRequestOrBuilder { + // Use DeleteTableRequest.newBuilder() to construct. 
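CreateTableResponse above is deliberately empty: the RPC's only outcome is success or a thrown exception. DeleteTableRequest, defined next, carries just the raw table name bytes. A minimal sketch of constructing one (the table name literal is hypothetical):

    DeleteTableRequest request = DeleteTableRequest.newBuilder()
        .setTableName(com.google.protobuf.ByteString.copyFromUtf8("testTable"))
        .build();  // build() fails unless the required tableName is set
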
+ private DeleteTableRequest(Builder builder) { + super(builder); + } + private DeleteTableRequest(boolean noInit) {} + + private static final DeleteTableRequest defaultInstance; + public static DeleteTableRequest getDefaultInstance() { + return defaultInstance; + } + + public DeleteTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequestOrBuilder { + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if 
(!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DeleteTableRequest) + } + + static { + defaultInstance = new DeleteTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteTableRequest) + } + + public interface DeleteTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class DeleteTableResponse extends + com.google.protobuf.GeneratedMessage + implements DeleteTableResponseOrBuilder { + // Use DeleteTableResponse.newBuilder() to construct. 
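Note that all of the parseFrom overloads shown above funnel through the builder's mergeFrom and then buildParsed(), so a missing required field surfaces as an InvalidProtocolBufferException rather than a generic IOException. A round-trip sketch under that assumption (toByteArray() is the standard GeneratedMessage serializer):

    byte[] wire = request.toByteArray();                        // serialize
    DeleteTableRequest copy = DeleteTableRequest.parseFrom(wire);
    assert copy.getTableName().equals(request.getTableName());  // bytes survive intact
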
+ private DeleteTableResponse(Builder builder) { + super(builder); + } + private DeleteTableResponse(boolean noInit) {} + + private static final DeleteTableResponse defaultInstance; + public static DeleteTableResponse getDefaultInstance() { + return defaultInstance; + } + + public DeleteTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DeleteTableResponse) + } + + static { + defaultInstance = new DeleteTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeleteTableResponse) + } + + public interface EnableTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + public static final class EnableTableRequest extends + com.google.protobuf.GeneratedMessage + implements EnableTableRequestOrBuilder { + // Use EnableTableRequest.newBuilder() to construct. 
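Like DeleteTableRequest, the EnableTableRequest defined next tracks its single required field with a presence bit (bitField0_), which is what hasTableName() and the builder's isInitialized() consult. A small sketch of that behavior (names hypothetical):

    EnableTableRequest.Builder builder = EnableTableRequest.newBuilder();
    boolean ready = builder.isInitialized();  // false: required tableName unset
    builder.setTableName(com.google.protobuf.ByteString.copyFromUtf8("testTable"));
    EnableTableRequest request = builder.build();  // succeeds once the bit is set
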
+ private EnableTableRequest(Builder builder) { + super(builder); + } + private EnableTableRequest(boolean noInit) {} + + private static final EnableTableRequest defaultInstance; + public static EnableTableRequest getDefaultInstance() { + return defaultInstance; + } + + public EnableTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + com.google.protobuf.ByteString data) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequestOrBuilder { + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if 
(!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:EnableTableRequest) + } + + static { + defaultInstance = new EnableTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableTableRequest) + } + + public interface EnableTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class EnableTableResponse extends + com.google.protobuf.GeneratedMessage + implements EnableTableResponseOrBuilder { + // Use EnableTableResponse.newBuilder() to construct. 
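
Editorial aside: the EnableTableRequest message generated above follows the stock protobuf 2.x builder pattern, so a round trip through the parseFrom() overloads is enough to sanity-check it. A minimal sketch, assuming the MasterProtos classes added by this patch are compiled onto the classpath; the class name EnableTableRequestExample and the table name "testTable" are illustrative only:

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest;

    public class EnableTableRequestExample {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // tableName is a required field; build() throws UninitializedMessageException
        // if it is left unset, mirroring the isInitialized() checks above.
        EnableTableRequest request = EnableTableRequest.newBuilder()
            .setTableName(ByteString.copyFromUtf8("testTable"))
            .build();

        // parseFrom() funnels through buildParsed(), so bytes missing the
        // required field surface as InvalidProtocolBufferException, not null.
        EnableTableRequest copy = EnableTableRequest.parseFrom(request.toByteArray());
        System.out.println("round trip ok: " + copy.getTableName().toStringUtf8());
      }
    }
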
+ private EnableTableResponse(Builder builder) { + super(builder); + } + private EnableTableResponse(boolean noInit) {} + + private static final EnableTableResponse defaultInstance; + public static EnableTableResponse getDefaultInstance() { + return defaultInstance; + } + + public EnableTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + 
return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:EnableTableResponse) + } + + static { + defaultInstance = new EnableTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EnableTableResponse) + } + + public interface DisableTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + public static final class DisableTableRequest extends + com.google.protobuf.GeneratedMessage + implements DisableTableRequestOrBuilder { + // Use DisableTableRequest.newBuilder() to 
construct. + private DisableTableRequest(Builder builder) { + super(builder); + } + private DisableTableRequest(boolean noInit) {} + + private static final DisableTableRequest defaultInstance; + public static DisableTableRequest getDefaultInstance() { + return defaultInstance; + } + + public DisableTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DisableTableRequest) + } + + static { + defaultInstance = new DisableTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DisableTableRequest) + } + + public interface DisableTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class DisableTableResponse extends + com.google.protobuf.GeneratedMessage + implements DisableTableResponseOrBuilder { + // Use DisableTableResponse.newBuilder() to construct. 
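
Editorial aside: DisableTableRequest behaves identically, and the parseDelimitedFrom() overloads above add length-prefixed framing that signals end-of-stream by returning null. A minimal sketch under the same assumptions (generated classes on the classpath; writeDelimitedTo() is the standard protobuf counterpart to parseDelimitedFrom(), not something added by this patch):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest;

    public class DisableTableRequestExample {
      public static void main(String[] args) throws IOException {
        DisableTableRequest.Builder builder = DisableTableRequest.newBuilder();
        // Prints false: tableName is required and has not been set yet.
        System.out.println("initialized: " + builder.isInitialized());
        DisableTableRequest request =
            builder.setTableName(ByteString.copyFromUtf8("testTable")).build();

        // Length-delimited round trip; a second read at end-of-stream yields
        // null because mergeDelimitedFrom() returns false there.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        request.writeDelimitedTo(out);
        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        DisableTableRequest first = DisableTableRequest.parseDelimitedFrom(in);
        System.out.println("first read:  " + first.getTableName().toStringUtf8());
        System.out.println("second read: " + DisableTableRequest.parseDelimitedFrom(in));
      }
    }
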
+ private DisableTableResponse(Builder builder) { + super(builder); + } + private DisableTableResponse(boolean noInit) {} + + private static final DisableTableResponse defaultInstance; + public static DisableTableResponse getDefaultInstance() { + return defaultInstance; + } + + public DisableTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:DisableTableResponse) + } + + static { + defaultInstance = new DisableTableResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DisableTableResponse) + } + + public interface ModifyTableRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required .TableSchema tableSchema = 2; + boolean hasTableSchema(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(); + } + public static final class ModifyTableRequest extends + com.google.protobuf.GeneratedMessage + implements ModifyTableRequestOrBuilder { + // Use ModifyTableRequest.newBuilder() to construct. + private ModifyTableRequest(Builder builder) { + super(builder); + } + private ModifyTableRequest(boolean noInit) {} + + private static final ModifyTableRequest defaultInstance; + public static ModifyTableRequest getDefaultInstance() { + return defaultInstance; + } + + public ModifyTableRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required .TableSchema tableSchema = 2; + public static final int TABLESCHEMA_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + return tableSchema_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + return tableSchema_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableSchema()) { + memoizedIsInitialized = 0; + return false; + } + if (!getTableSchema().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, tableSchema_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, tableSchema_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + 
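
Editorial aside: ModifyTableRequest is the first message in this hunk carrying a nested required message, and its isInitialized() recurses into the embedded TableSchema, as the checks above show. A minimal sketch, assuming the TableSchema generated in HBaseProtos exposes the usual setName(ByteString)/getName() accessors for its bytes name field; the table name is again illustrative:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest;

    public class ModifyTableRequestExample {
      public static void main(String[] args) throws Exception {
        // Both fields are required, and the request is only initialized once
        // the nested TableSchema is itself initialized.
        TableSchema schema = TableSchema.newBuilder()
            .setName(ByteString.copyFromUtf8("testTable"))
            .build();

        ModifyTableRequest request = ModifyTableRequest.newBuilder()
            .setTableName(ByteString.copyFromUtf8("testTable"))
            .setTableSchema(schema)
            .build();

        // Round trip exercises the nested-message wire path (field 2, tag 18).
        ModifyTableRequest copy = ModifyTableRequest.parseFrom(request.toByteArray());
        System.out.println("schema name: " + copy.getTableSchema().getName().toStringUtf8());
      }
    }
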
private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasTableSchema() == other.hasTableSchema()); + if (hasTableSchema()) { + result = result && getTableSchema() + .equals(other.getTableSchema()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasTableSchema()) { + hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getTableSchema().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableSchemaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (tableSchemaBuilder_ == null) { + result.tableSchema_ = tableSchema_; + } else { + result.tableSchema_ = tableSchemaBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasTableSchema()) { + mergeTableSchema(other.getTableSchema()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasTableSchema()) { + + return false; + } + if (!getTableSchema().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); + if (hasTableSchema()) { + subBuilder.mergeFrom(getTableSchema()); + } + input.readMessage(subBuilder, extensionRegistry); + 
setTableSchema(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required .TableSchema tableSchema = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + public boolean hasTableSchema() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() { + if (tableSchemaBuilder_ == null) { + return tableSchema_; + } else { + return tableSchemaBuilder_.getMessage(); + } + } + public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableSchema_ = value; + onChanged(); + } else { + tableSchemaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setTableSchema( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + tableSchema_ = builderForValue.build(); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) { + tableSchema_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial(); + } else { + tableSchema_ = value; + } + onChanged(); + } else { + tableSchemaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearTableSchema() { + if (tableSchemaBuilder_ == null) { + tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); + onChanged(); + } else { + tableSchemaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTableSchemaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() { + if 
(tableSchemaBuilder_ != null) { + return tableSchemaBuilder_.getMessageOrBuilder(); + } else { + return tableSchema_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> + getTableSchemaFieldBuilder() { + if (tableSchemaBuilder_ == null) { + tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( + tableSchema_, + getParentForChildren(), + isClean()); + tableSchema_ = null; + } + return tableSchemaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ModifyTableRequest) + } + + static { + defaultInstance = new ModifyTableRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyTableRequest) + } + + public interface ModifyTableResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ModifyTableResponse extends + com.google.protobuf.GeneratedMessage + implements ModifyTableResponseOrBuilder { + // Use ModifyTableResponse.newBuilder() to construct. + private ModifyTableResponse(Builder builder) { + super(builder); + } + private ModifyTableResponse(boolean noInit) {} + + private static final ModifyTableResponse defaultInstance; + public static ModifyTableResponse getDefaultInstance() { + return defaultInstance; + } + + public ModifyTableResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) obj; + + boolean result = true; 
+ result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse prototype) { + return newBuilder().mergeFrom(prototype); + 
} + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()) return this; + 
this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:ModifyTableResponse)
+    }
+
+    static {
+      defaultInstance = new ModifyTableResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:ModifyTableResponse)
+  }
+
 public interface IsMasterRunningRequestOrBuilder
     extends com.google.protobuf.MessageOrBuilder {
 }
@@ -5931,11 +12943,1857 @@ public final class MasterProtos {
   // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse)
 }
+  public interface GetSchemaAlterStatusRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required bytes tableName = 1;
+    boolean hasTableName();
+    com.google.protobuf.ByteString getTableName();
+  }
+  public static final class GetSchemaAlterStatusRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements GetSchemaAlterStatusRequestOrBuilder {
+    // Use GetSchemaAlterStatusRequest.newBuilder() to construct.
+    private GetSchemaAlterStatusRequest(Builder builder) {
+      super(builder);
+    }
+    private GetSchemaAlterStatusRequest(boolean noInit) {}
+
+    private static final GetSchemaAlterStatusRequest defaultInstance;
+    public static GetSchemaAlterStatusRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public GetSchemaAlterStatusRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // required bytes tableName = 1;
+    public static final int TABLENAME_FIELD_NUMBER = 1;
+    private com.google.protobuf.ByteString tableName_;
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public com.google.protobuf.ByteString getTableName() {
+      return tableName_;
+    }
+
+    private void initFields() {
+      tableName_ = com.google.protobuf.ByteString.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasTableName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, tableName_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+
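+    // The messages in this hunk are machine-generated by protoc; going by the
+    // field comments carried into the accessors, the corresponding Master.proto
+    // definitions are presumably:
+    //
+    //   message ModifyTableResponse {
+    //   }
+    //   message GetSchemaAlterStatusRequest {
+    //     required bytes tableName = 1;
+    //   }
+    //
+    // A client would build the request along these lines (sketch only; the
+    // table name below is a made-up example):
+    //
+    //   GetSchemaAlterStatusRequest req = GetSchemaAlterStatusRequest.newBuilder()
+    //       .setTableName(ByteString.copyFromUtf8("myTable"))
+    //       .build();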
private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = 
getDefaultInstance().getTableName();
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest)
+    }
+
+    static {
+      defaultInstance = new GetSchemaAlterStatusRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest)
+  }
+
+  public interface GetSchemaAlterStatusResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional uint32 yetToUpdateRegions = 1;
+    boolean hasYetToUpdateRegions();
+    int getYetToUpdateRegions();
+
+    // optional uint32 totalRegions = 2;
+    boolean hasTotalRegions();
+    int getTotalRegions();
+  }
+  public static final class GetSchemaAlterStatusResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements GetSchemaAlterStatusResponseOrBuilder {
+    // Use GetSchemaAlterStatusResponse.newBuilder() to construct.
+    private GetSchemaAlterStatusResponse(Builder builder) {
+      super(builder);
+    }
+    private GetSchemaAlterStatusResponse(boolean noInit) {}
+
+    private static final GetSchemaAlterStatusResponse defaultInstance;
+    public static GetSchemaAlterStatusResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public GetSchemaAlterStatusResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // optional uint32 yetToUpdateRegions = 1;
+    public static final int YETTOUPDATEREGIONS_FIELD_NUMBER = 1;
+    private int yetToUpdateRegions_;
+    public boolean hasYetToUpdateRegions() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public int getYetToUpdateRegions() {
+      return yetToUpdateRegions_;
+    }
+
+    // optional uint32 totalRegions = 2;
+    public static final int TOTALREGIONS_FIELD_NUMBER = 2;
+    private int totalRegions_;
+    public boolean hasTotalRegions() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    public int getTotalRegions() {
+      return totalRegions_;
+    }
+
+    private void initFields() {
+      yetToUpdateRegions_ = 0;
+      totalRegions_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeUInt32(1, yetToUpdateRegions_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeUInt32(2, totalRegions_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(1, yetToUpdateRegions_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(2, totalRegions_);
+      }
+      size += getUnknownFields().getSerializedSize();
+
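+      // Presumably generated from a Master.proto definition along the lines of:
+      //
+      //   message GetSchemaAlterStatusResponse {
+      //     optional uint32 yetToUpdateRegions = 1;
+      //     optional uint32 totalRegions = 2;
+      //   }
+      //
+      // i.e. the master reports alter-table progress as "regions still to
+      // update" out of "total regions" for the table.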
memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) obj; + + boolean result = true; + result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions()); + if (hasYetToUpdateRegions()) { + result = result && (getYetToUpdateRegions() + == other.getYetToUpdateRegions()); + } + result = result && (hasTotalRegions() == other.hasTotalRegions()); + if (hasTotalRegions()) { + result = result && (getTotalRegions() + == other.getTotalRegions()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasYetToUpdateRegions()) { + hash = (37 * hash) + YETTOUPDATEREGIONS_FIELD_NUMBER; + hash = (53 * hash) + getYetToUpdateRegions(); + } + if (hasTotalRegions()) { + hash = (37 * hash) + TOTALREGIONS_FIELD_NUMBER; + hash = (53 * hash) + getTotalRegions(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + yetToUpdateRegions_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + totalRegions_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + 
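+        // A client-side sketch (hypothetical names, not part of this patch) of
+        // consuming the response, e.g. to poll alter-table progress:
+        //
+        //   GetSchemaAlterStatusResponse resp = ...; // returned by the master RPC
+        //   int pending = resp.hasYetToUpdateRegions() ? resp.getYetToUpdateRegions() : 0;
+        //   int total = resp.hasTotalRegions() ? resp.getTotalRegions() : 0;
+        //   boolean done = (pending == 0 && total > 0); // one plausible "finished" check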
} + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.yetToUpdateRegions_ = yetToUpdateRegions_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.totalRegions_ = totalRegions_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this; + if (other.hasYetToUpdateRegions()) { + setYetToUpdateRegions(other.getYetToUpdateRegions()); + } + if (other.hasTotalRegions()) { + setTotalRegions(other.getTotalRegions()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + yetToUpdateRegions_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + totalRegions_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint32 yetToUpdateRegions = 1; + private int yetToUpdateRegions_ ; + public boolean hasYetToUpdateRegions() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getYetToUpdateRegions() { + return yetToUpdateRegions_; + } + public Builder 
setYetToUpdateRegions(int value) {
+        bitField0_ |= 0x00000001;
+        yetToUpdateRegions_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearYetToUpdateRegions() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        yetToUpdateRegions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional uint32 totalRegions = 2;
+      private int totalRegions_ ;
+      public boolean hasTotalRegions() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public int getTotalRegions() {
+        return totalRegions_;
+      }
+      public Builder setTotalRegions(int value) {
+        bitField0_ |= 0x00000002;
+        totalRegions_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearTotalRegions() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        totalRegions_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse)
+    }
+
+    static {
+      defaultInstance = new GetSchemaAlterStatusResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse)
+  }
+
+  public interface GetTableDescriptorsRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated string tableNames = 1;
+    java.util.List<String> getTableNamesList();
+    int getTableNamesCount();
+    String getTableNames(int index);
+  }
+  public static final class GetTableDescriptorsRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements GetTableDescriptorsRequestOrBuilder {
+    // Use GetTableDescriptorsRequest.newBuilder() to construct.
+    private GetTableDescriptorsRequest(Builder builder) {
+      super(builder);
+    }
+    private GetTableDescriptorsRequest(boolean noInit) {}
+
+    private static final GetTableDescriptorsRequest defaultInstance;
+    public static GetTableDescriptorsRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public GetTableDescriptorsRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable;
+    }
+
+    // repeated string tableNames = 1;
+    public static final int TABLENAMES_FIELD_NUMBER = 1;
+    private com.google.protobuf.LazyStringList tableNames_;
+    public java.util.List<String>
+        getTableNamesList() {
+      return tableNames_;
+    }
+    public int getTableNamesCount() {
+      return tableNames_.size();
+    }
+    public String getTableNames(int index) {
+      return tableNames_.get(index);
+    }
+
+    private void initFields() {
+      tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < tableNames_.size(); i++) {
+        output.writeBytes(1, tableNames_.getByteString(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i <
tableNames_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(tableNames_.getByteString(i)); + } + size += dataSize; + size += 1 * getTableNamesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) obj; + + boolean result = true; + result = result && getTableNamesList() + .equals(other.getTableNamesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getTableNamesCount() > 0) { + hash = (37 * hash) + TABLENAMES_FIELD_NUMBER; + hash = (53 * hash) + getTableNamesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + tableNames_ = new com.google.protobuf.UnmodifiableLazyStringList( + tableNames_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.tableNames_ = tableNames_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this; + if (!other.tableNames_.isEmpty()) { + if (tableNames_.isEmpty()) { + tableNames_ = other.tableNames_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTableNamesIsMutable(); + tableNames_.addAll(other.tableNames_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureTableNamesIsMutable(); + tableNames_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated string tableNames = 1; + private com.google.protobuf.LazyStringList tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureTableNamesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + tableNames_ = new com.google.protobuf.LazyStringArrayList(tableNames_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getTableNamesList() { + return java.util.Collections.unmodifiableList(tableNames_); + } + public int getTableNamesCount() { + return tableNames_.size(); + } + public String getTableNames(int index) { + return tableNames_.get(index); + } + public Builder setTableNames( + int index, String 
value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureTableNamesIsMutable();
+        tableNames_.set(index, value);
+        onChanged();
+        return this;
+      }
+      public Builder addTableNames(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureTableNamesIsMutable();
+        tableNames_.add(value);
+        onChanged();
+        return this;
+      }
+      public Builder addAllTableNames(
+          java.lang.Iterable<String> values) {
+        ensureTableNamesIsMutable();
+        super.addAll(values, tableNames_);
+        onChanged();
+        return this;
+      }
+      public Builder clearTableNames() {
+        tableNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+      void addTableNames(com.google.protobuf.ByteString value) {
+        ensureTableNamesIsMutable();
+        tableNames_.add(value);
+        onChanged();
+      }
+
+      // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest)
+    }
+
+    static {
+      defaultInstance = new GetTableDescriptorsRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest)
+  }
+
+  public interface GetTableDescriptorsResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated .TableSchema tableSchema = 1;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>
+        getTableSchemaList();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index);
+    int getTableSchemaCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
+        getTableSchemaOrBuilderList();
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
+        int index);
+  }
+  public static final class GetTableDescriptorsResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements GetTableDescriptorsResponseOrBuilder {
+    // Use GetTableDescriptorsResponse.newBuilder() to construct.
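+    // Presumably generated from Master.proto definitions along the lines of:
+    //
+    //   message GetTableDescriptorsRequest {
+    //     repeated string tableNames = 1;
+    //   }
+    //   message GetTableDescriptorsResponse {
+    //     repeated TableSchema tableSchema = 1;
+    //   }
+    //
+    // Each returned TableSchema can then be mapped back to an HTableDescriptor
+    // via HTableDescriptor.convert(TableSchema), which this patch makes public,
+    // e.g. (sketch):
+    //
+    //   for (TableSchema ts : resp.getTableSchemaList()) {
+    //     HTableDescriptor htd = HTableDescriptor.convert(ts);
+    //   }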
+ private GetTableDescriptorsResponse(Builder builder) { + super(builder); + } + private GetTableDescriptorsResponse(boolean noInit) {} + + private static final GetTableDescriptorsResponse defaultInstance; + public static GetTableDescriptorsResponse getDefaultInstance() { + return defaultInstance; + } + + public GetTableDescriptorsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + } + + // repeated .TableSchema tableSchema = 1; + public static final int TABLESCHEMA_FIELD_NUMBER = 1; + private java.util.List tableSchema_; + public java.util.List getTableSchemaList() { + return tableSchema_; + } + public java.util.List + getTableSchemaOrBuilderList() { + return tableSchema_; + } + public int getTableSchemaCount() { + return tableSchema_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { + return tableSchema_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( + int index) { + return tableSchema_.get(index); + } + + private void initFields() { + tableSchema_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getTableSchemaCount(); i++) { + if (!getTableSchema(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < tableSchema_.size(); i++) { + output.writeMessage(1, tableSchema_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < tableSchema_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, tableSchema_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) obj; + + boolean result = true; + result = result && getTableSchemaList() + .equals(other.getTableSchemaList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + 
public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getTableSchemaCount() > 0) { + hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getTableSchemaList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableSchemaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (tableSchemaBuilder_ == null) { + tableSchema_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + tableSchemaBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse(this); + int from_bitField0_ = bitField0_; + if (tableSchemaBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + tableSchema_ = 
java.util.Collections.unmodifiableList(tableSchema_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.tableSchema_ = tableSchema_; + } else { + result.tableSchema_ = tableSchemaBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this; + if (tableSchemaBuilder_ == null) { + if (!other.tableSchema_.isEmpty()) { + if (tableSchema_.isEmpty()) { + tableSchema_ = other.tableSchema_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTableSchemaIsMutable(); + tableSchema_.addAll(other.tableSchema_); + } + onChanged(); + } + } else { + if (!other.tableSchema_.isEmpty()) { + if (tableSchemaBuilder_.isEmpty()) { + tableSchemaBuilder_.dispose(); + tableSchemaBuilder_ = null; + tableSchema_ = other.tableSchema_; + bitField0_ = (bitField0_ & ~0x00000001); + tableSchemaBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getTableSchemaFieldBuilder() : null; + } else { + tableSchemaBuilder_.addAllMessages(other.tableSchema_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getTableSchemaCount(); i++) { + if (!getTableSchema(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addTableSchema(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .TableSchema tableSchema = 1; + private java.util.List tableSchema_ = + java.util.Collections.emptyList(); + private void ensureTableSchemaIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + tableSchema_ = new java.util.ArrayList(tableSchema_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_; + + public java.util.List getTableSchemaList() { + if (tableSchemaBuilder_ == null) { + return 
java.util.Collections.unmodifiableList(tableSchema_); + } else { + return tableSchemaBuilder_.getMessageList(); + } + } + public int getTableSchemaCount() { + if (tableSchemaBuilder_ == null) { + return tableSchema_.size(); + } else { + return tableSchemaBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) { + if (tableSchemaBuilder_ == null) { + return tableSchema_.get(index); + } else { + return tableSchemaBuilder_.getMessage(index); + } + } + public Builder setTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableSchemaIsMutable(); + tableSchema_.set(index, value); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(index, value); + } + return this; + } + public Builder setTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.set(index, builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableSchemaIsMutable(); + tableSchema_.add(value); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(value); + } + return this; + } + public Builder addTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) { + if (tableSchemaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTableSchemaIsMutable(); + tableSchema_.add(index, value); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(index, value); + } + return this; + } + public Builder addTableSchema( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.add(builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addTableSchema( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.add(index, builderForValue.build()); + onChanged(); + } else { + tableSchemaBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllTableSchema( + java.lang.Iterable values) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + super.addAll(values, tableSchema_); + onChanged(); + } else { + tableSchemaBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearTableSchema() { + if (tableSchemaBuilder_ == null) { + tableSchema_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + tableSchemaBuilder_.clear(); + } + return this; + } + public Builder removeTableSchema(int index) { + if (tableSchemaBuilder_ == null) { + ensureTableSchemaIsMutable(); + tableSchema_.remove(index); + onChanged(); + } else { + tableSchemaBuilder_.remove(index); + } + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder( + int index) { + return getTableSchemaFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder( + int index) { + if (tableSchemaBuilder_ == null) { + return tableSchema_.get(index); } else { + return tableSchemaBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getTableSchemaOrBuilderList() { + if (tableSchemaBuilder_ != null) { + return tableSchemaBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(tableSchema_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() { + return getTableSchemaFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder( + int index) { + return getTableSchemaFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()); + } + public java.util.List + getTableSchemaBuilderList() { + return getTableSchemaFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> + getTableSchemaFieldBuilder() { + if (tableSchemaBuilder_ == null) { + tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>( + tableSchema_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + tableSchema_ = null; + } + return tableSchemaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse) + } + + static { + defaultInstance = new GetTableDescriptorsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse) + } + public static abstract class MasterService implements com.google.protobuf.Service { protected MasterService() {} public interface Interface { + public abstract void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done); + public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, @@ -5951,6 +14809,36 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback done); + public abstract void 
offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done); + public abstract void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, @@ -5976,12 +14864,46 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback done); + public abstract void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done); + } public static com.google.protobuf.Service newReflectiveService( final Interface impl) { return new MasterService() { @java.lang.Override + public void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.addColumn(controller, request, done); + } + + @java.lang.Override + public void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.deleteColumn(controller, request, done); + } + + @java.lang.Override + public void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done) { + impl.modifyColumn(controller, request, done); + } + + @java.lang.Override public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, @@ -6006,6 +14928,54 @@ public final class MasterProtos { } @java.lang.Override + public void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.offlineRegion(controller, request, done); + } + + 
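
The additions above give MasterService the schema and table-lifecycle RPCs (addColumn, deleteColumn, modifyColumn, offlineRegion, deleteTable, enableTable, disableTable, modifyTable, createTable, plus getSchemaAlterStatus and getTableDescriptors) alongside the pre-existing region and cluster calls; newReflectiveService then wraps a user-supplied Interface impl in a service whose generated overrides, like the ones surrounding this note, simply forward each call. Each of these RPCs carries a request message assembled on the client. As a sketch only, a hand-built equivalent of the createTable request might look like the following; the field names tableSchema and splitKeys are assumptions about Master.proto (the CreateTableRequest internals are not shown in this excerpt), and the patch itself routes this through RequestConverter.buildCreateTableRequest:

    // Sketch only: hand-building the request behind the new createTable rpc.
    // 'tableSchema' and 'splitKeys' are assumed Master.proto field names.
    static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest
        buildCreateTableRequestSketch(org.apache.hadoop.hbase.HTableDescriptor desc,
            byte[][] splitKeys) {
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder
          builder = org.apache.hadoop.hbase.protobuf.generated.MasterProtos
              .CreateTableRequest.newBuilder();
      builder.setTableSchema(desc.convert());  // HTableDescriptor -> pb TableSchema
      if (splitKeys != null) {
        for (byte[] key : splitKeys) {
          builder.addSplitKeys(com.google.protobuf.ByteString.copyFrom(key));
        }
      }
      return builder.build();
    }
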
@java.lang.Override + public void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.deleteTable(controller, request, done); + } + + @java.lang.Override + public void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.enableTable(controller, request, done); + } + + @java.lang.Override + public void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.disableTable(controller, request, done); + } + + @java.lang.Override + public void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.modifyTable(controller, request, done); + } + + @java.lang.Override + public void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done) { + impl.createTable(controller, request, done); + } + + @java.lang.Override public void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, @@ -6045,6 +15015,22 @@ public final class MasterProtos { impl.setBalancerRunning(controller, request, done); } + @java.lang.Override + public void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done) { + impl.getSchemaAlterStatus(controller, request, done); + } + + @java.lang.Override + public void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done) { + impl.getTableDescriptors(controller, request, done); + } + }; } @@ -6068,21 +15054,43 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: - return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request); + return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request); case 1: - return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request); + return impl.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request); case 2: - return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request); + return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request); case 3: - return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request); + return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request); case 4: - return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request); + 
return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request); case 5: - return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request); + return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request); case 6: - return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request); + return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request); case 7: + return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request); + case 8: + return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request); + case 9: + return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request); + case 10: + return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request); + case 11: + return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request); + case 12: + return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request); + case 13: + return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request); + case 14: + return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request); + case 15: + return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request); + case 16: return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request); + case 17: + return impl.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request); + case 18: + return impl.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -6098,21 +15106,43 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); case 
5: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); + case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -6128,21 +15158,43 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); + case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -6151,6 +15203,21 @@ public final class MasterProtos { }; } + public abstract void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done); + public abstract void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, @@ -6166,6 +15233,36 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback done); + public abstract void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void modifyTable( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done); + public abstract void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, @@ -6191,6 +15288,16 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback done); + public abstract void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done); + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -6214,45 +15321,100 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: + this.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 1: + case 4: this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 2: + case 5: this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 3: + case 6: + this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 8: + this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 9: + this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 10: + this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + 
return; + case 11: + this.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 12: this.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 4: + case 13: this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 5: + case 14: this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 6: + case 15: this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 7: + case 16: this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; + case 17: + this.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 18: + this.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; default: throw new java.lang.AssertionError("Can't get here."); } @@ -6268,21 +15430,43 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance(); case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance(); + 
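
Every switch in this hunk is positional: the three column RPCs take indices 0 through 2, pushing moveRegion, assignRegion, and unassignRegion down to 3 through 5; the region and table additions fill 6 through 11; the pre-existing cluster calls slide to 12 through 16; and the two new schema queries land at 17 and 18. The same ordinals are hard-coded into the Stub and BlockingStub further down, so client and server must be generated from the same Master.proto revision or the method indices skew. A quick sanity check of that contract (an illustration, not part of the patch) could read:

    // Sketch: asserting the positional contract the renumbered switches rely on.
    static void checkMethodIndexes() {
      com.google.protobuf.Descriptors.MethodDescriptor m =
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService
              .getDescriptor().getMethods().get(18);
      // Index 18 must be the getTableDescriptors rpc, matching 'case 18' in the
      // switches above and getDescriptor().getMethods().get(18) in the stubs.
      System.out.println(m.getIndex() + " = " + m.getName());
    }
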
case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance(); + case 12: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance(); + case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -6298,21 +15482,43 @@ public final class MasterProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(); case 1: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(); case 3: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(); case 6: - return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(); case 7: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(); + case 12: + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(); + case 13: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(); + case 14: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(); + case 15: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(); + case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(); + case 17: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(); + case 18: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -6334,12 +15540,57 @@ public final class MasterProtos { return channel; } + public void addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance())); + } + + public void deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance())); + } + + public void modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance())); + } + public void moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(0), + getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(), @@ -6354,7 +15605,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(1), + 
getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(), @@ -6369,7 +15620,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(2), + getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(), @@ -6379,12 +15630,102 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance())); } + public void offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance())); + } + + public void deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance())); + } + + public void enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance())); + } + + public void disableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance())); + } + + public void modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + 
getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance())); + } + + public void createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance())); + } + public void isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(3), + getDescriptor().getMethods().get(12), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(), @@ -6399,7 +15740,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(4), + getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(), @@ -6414,7 +15755,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(5), + getDescriptor().getMethods().get(14), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(), @@ -6429,7 +15770,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(6), + getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(), @@ -6444,7 +15785,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(7), + getDescriptor().getMethods().get(16), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(), @@ -6453,6 +15794,36 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance())); } + + public void getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, + com.google.protobuf.RpcCallback done) { 
+ channel.callMethod( + getDescriptor().getMethods().get(17), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance())); + } + + public void getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(18), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance())); + } } public static BlockingInterface newBlockingStub( @@ -6461,6 +15832,21 @@ public final class MasterProtos { } public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request) + throws com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request) @@ -6476,6 +15862,36 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request) + throws com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) @@ -6500,6 +15916,16 @@ public final class MasterProtos { com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request) + throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { @@ -6509,12 +15935,48 @@ public final class MasterProtos { private final com.google.protobuf.BlockingRpcChannel channel; + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + 
controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), + getDescriptor().getMethods().get(3), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()); @@ -6526,7 +15988,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), + getDescriptor().getMethods().get(4), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()); @@ -6538,19 +16000,91 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), + getDescriptor().getMethods().get(5), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()); } + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), + getDescriptor().getMethods().get(12), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()); @@ -6562,7 +16096,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), + getDescriptor().getMethods().get(13), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()); @@ -6574,7 +16108,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), + getDescriptor().getMethods().get(14), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()); @@ -6586,7 +16120,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), + 
getDescriptor().getMethods().get(15), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance()); @@ -6598,16 +16132,70 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), + getDescriptor().getMethods().get(16), controller, request, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance()); } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(17), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(18), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance()); + } + } } private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyColumnRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyColumnRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyColumnResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyColumnResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_MoveRegionRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -6638,6 +16226,66 
@@ public final class MasterProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_UnassignRegionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_OfflineRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OfflineRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OfflineRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OfflineRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CreateTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CreateTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CreateTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CreateTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeleteTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeleteTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EnableTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EnableTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DisableTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DisableTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DisableTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DisableTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyTableRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyTableRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyTableResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyTableResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_IsMasterRunningRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -6687,6 +16335,26 @@ public final class MasterProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_GetSchemaAlterStatusRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetSchemaAlterStatusResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetTableDescriptorsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetTableDescriptorsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetTableDescriptorsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetTableDescriptorsResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -6696,45 +16364,136 @@ public final class MasterProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\014Master.proto\032\013hbase.proto\"Z\n\021MoveRegio" + - "nRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022#\n\016destServerName\030\002 \001(\0132\013.ServerName\"" + - "\024\n\022MoveRegionResponse\"7\n\023AssignRegionReq" + - "uest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\"\026" + - "\n\024AssignRegionResponse\"O\n\025UnassignRegion" + - "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + - "r\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026UnassignRegio" + - "nResponse\"\030\n\026IsMasterRunningRequest\"2\n\027I" + - "sMasterRunningResponse\022\027\n\017isMasterRunnin", - "g\030\001 \002(\010\"\021\n\017ShutdownRequest\"\022\n\020ShutdownRe" + - "sponse\"\023\n\021StopMasterRequest\"\024\n\022StopMaste" + - "rResponse\"\020\n\016BalanceRequest\"&\n\017BalanceRe" + - "sponse\022\023\n\013balancerRan\030\001 \002(\010\"<\n\031SetBalanc" + - "erRunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchron" + - "ous\030\002 \001(\010\"6\n\032SetBalancerRunningResponse\022" + - "\030\n\020prevBalanceValue\030\001 \001(\0102\361\003\n\rMasterServ" + - "ice\0225\n\nmoveRegion\022\022.MoveRegionRequest\032\023." 
+ - "MoveRegionResponse\022;\n\014assignRegion\022\024.Ass" + - "ignRegionRequest\032\025.AssignRegionResponse\022", - "A\n\016unassignRegion\022\026.UnassignRegionReques" + - "t\032\027.UnassignRegionResponse\022D\n\017isMasterRu" + - "nning\022\027.IsMasterRunningRequest\032\030.IsMaste" + - "rRunningResponse\022/\n\010shutdown\022\020.ShutdownR" + - "equest\032\021.ShutdownResponse\0225\n\nstopMaster\022" + - "\022.StopMasterRequest\032\023.StopMasterResponse" + - "\022,\n\007balance\022\017.BalanceRequest\032\020.BalanceRe" + - "sponse\022M\n\022setBalancerRunning\022\032.SetBalanc" + - "erRunningRequest\032\033.SetBalancerRunningRes" + - "ponseBB\n*org.apache.hadoop.hbase.protobu", - "f.generatedB\014MasterProtosH\001\210\001\001\240\001\001" + "\n\014Master.proto\032\013hbase.proto\"R\n\020AddColumn" + + "Request\022\021\n\ttableName\030\001 \002(\014\022+\n\016columnFami" + + "lies\030\002 \002(\0132\023.ColumnFamilySchema\"\023\n\021AddCo" + + "lumnResponse\"<\n\023DeleteColumnRequest\022\021\n\tt" + + "ableName\030\001 \002(\014\022\022\n\ncolumnName\030\002 \002(\014\"\026\n\024De" + + "leteColumnResponse\"U\n\023ModifyColumnReques" + + "t\022\021\n\ttableName\030\001 \002(\014\022+\n\016columnFamilies\030\002" + + " \002(\0132\023.ColumnFamilySchema\"\026\n\024ModifyColum" + + "nResponse\"Z\n\021MoveRegionRequest\022 \n\006region" + + "\030\001 \002(\0132\020.RegionSpecifier\022#\n\016destServerNa", + "me\030\002 \001(\0132\013.ServerName\"\024\n\022MoveRegionRespo" + + "nse\"7\n\023AssignRegionRequest\022 \n\006region\030\001 \002" + + "(\0132\020.RegionSpecifier\"\026\n\024AssignRegionResp" + + "onse\"O\n\025UnassignRegionRequest\022 \n\006region\030" + + "\001 \002(\0132\020.RegionSpecifier\022\024\n\005force\030\002 \001(\010:\005" + + "false\"\030\n\026UnassignRegionResponse\"8\n\024Offli" + + "neRegionRequest\022 \n\006region\030\001 \002(\0132\020.Region" + + "Specifier\"\027\n\025OfflineRegionResponse\"J\n\022Cr" + + "eateTableRequest\022!\n\013tableSchema\030\001 \002(\0132\014." 
+ + "TableSchema\022\021\n\tsplitKeys\030\002 \003(\014\"\025\n\023Create", + "TableResponse\"\'\n\022DeleteTableRequest\022\021\n\tt" + + "ableName\030\001 \002(\014\"\025\n\023DeleteTableResponse\"\'\n" + + "\022EnableTableRequest\022\021\n\ttableName\030\001 \002(\014\"\025" + + "\n\023EnableTableResponse\"(\n\023DisableTableReq" + + "uest\022\021\n\ttableName\030\001 \002(\014\"\026\n\024DisableTableR" + + "esponse\"J\n\022ModifyTableRequest\022\021\n\ttableNa" + + "me\030\001 \002(\014\022!\n\013tableSchema\030\002 \002(\0132\014.TableSch" + + "ema\"\025\n\023ModifyTableResponse\"\030\n\026IsMasterRu" + + "nningRequest\"2\n\027IsMasterRunningResponse\022" + + "\027\n\017isMasterRunning\030\001 \002(\010\"\021\n\017ShutdownRequ", + "est\"\022\n\020ShutdownResponse\"\023\n\021StopMasterReq" + + "uest\"\024\n\022StopMasterResponse\"\020\n\016BalanceReq" + + "uest\"&\n\017BalanceResponse\022\023\n\013balancerRan\030\001" + + " \002(\010\"<\n\031SetBalancerRunningRequest\022\n\n\002on\030" + + "\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"6\n\032SetBalance" + + "rRunningResponse\022\030\n\020prevBalanceValue\030\001 \001" + + "(\010\"0\n\033GetSchemaAlterStatusRequest\022\021\n\ttab" + + "leName\030\001 \002(\014\"P\n\034GetSchemaAlterStatusResp" + + "onse\022\032\n\022yetToUpdateRegions\030\001 \001(\r\022\024\n\014tota" + + "lRegions\030\002 \001(\r\"0\n\032GetTableDescriptorsReq", + "uest\022\022\n\ntableNames\030\001 \003(\t\"@\n\033GetTableDesc" + + "riptorsResponse\022!\n\013tableSchema\030\001 \003(\0132\014.T" + + "ableSchema2\253\t\n\rMasterService\0222\n\taddColum" + + "n\022\021.AddColumnRequest\032\022.AddColumnResponse" + + "\022;\n\014deleteColumn\022\024.DeleteColumnRequest\032\025" + + ".DeleteColumnResponse\022;\n\014modifyColumn\022\024." 
+ + "ModifyColumnRequest\032\025.ModifyColumnRespon" + + "se\0225\n\nmoveRegion\022\022.MoveRegionRequest\032\023.M" + + "oveRegionResponse\022;\n\014assignRegion\022\024.Assi" + + "gnRegionRequest\032\025.AssignRegionResponse\022A", + "\n\016unassignRegion\022\026.UnassignRegionRequest" + + "\032\027.UnassignRegionResponse\022>\n\rofflineRegi" + + "on\022\025.OfflineRegionRequest\032\026.OfflineRegio" + + "nResponse\0228\n\013deleteTable\022\023.DeleteTableRe" + + "quest\032\024.DeleteTableResponse\0228\n\013enableTab" + + "le\022\023.EnableTableRequest\032\024.EnableTableRes" + + "ponse\022;\n\014disableTable\022\024.DisableTableRequ" + + "est\032\025.DisableTableResponse\0228\n\013modifyTabl" + + "e\022\023.ModifyTableRequest\032\024.ModifyTableResp" + + "onse\0228\n\013createTable\022\023.CreateTableRequest", + "\032\024.CreateTableResponse\022D\n\017isMasterRunnin" + + "g\022\027.IsMasterRunningRequest\032\030.IsMasterRun" + + "ningResponse\022/\n\010shutdown\022\020.ShutdownReque" + + "st\032\021.ShutdownResponse\0225\n\nstopMaster\022\022.St" + + "opMasterRequest\032\023.StopMasterResponse\022,\n\007" + + "balance\022\017.BalanceRequest\032\020.BalanceRespon" + + "se\022M\n\022setBalancerRunning\022\032.SetBalancerRu" + + "nningRequest\032\033.SetBalancerRunningRespons" + + "e\022S\n\024getSchemaAlterStatus\022\034.GetSchemaAlt" + + "erStatusRequest\032\035.GetSchemaAlterStatusRe", + "sponse\022P\n\023getTableDescriptors\022\033.GetTable" + + "DescriptorsRequest\032\034.GetTableDescriptors" + + "ResponseBB\n*org.apache.hadoop.hbase.prot" + + "obuf.generatedB\014MasterProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; - internal_static_MoveRegionRequest_descriptor = + internal_static_AddColumnRequest_descriptor = getDescriptor().getMessageTypes().get(0); + internal_static_AddColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class); + internal_static_AddColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_AddColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class); + internal_static_DeleteColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_DeleteColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class); + internal_static_DeleteColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + 
internal_static_DeleteColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class); + internal_static_ModifyColumnRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_ModifyColumnRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyColumnRequest_descriptor, + new java.lang.String[] { "TableName", "ColumnFamilies", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class); + internal_static_ModifyColumnResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_ModifyColumnResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyColumnResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class); + internal_static_MoveRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); internal_static_MoveRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionRequest_descriptor, @@ -6742,7 +16501,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class); internal_static_MoveRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(1); + getDescriptor().getMessageTypes().get(7); internal_static_MoveRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MoveRegionResponse_descriptor, @@ -6750,7 +16509,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class); internal_static_AssignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(2); + getDescriptor().getMessageTypes().get(8); internal_static_AssignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionRequest_descriptor, @@ -6758,7 +16517,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class); internal_static_AssignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(9); internal_static_AssignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AssignRegionResponse_descriptor, @@ -6766,7 +16525,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class); internal_static_UnassignRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(4); 
+ getDescriptor().getMessageTypes().get(10); internal_static_UnassignRegionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionRequest_descriptor, @@ -6774,15 +16533,111 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class); internal_static_UnassignRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(11); internal_static_UnassignRegionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UnassignRegionResponse_descriptor, new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class); + internal_static_OfflineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_OfflineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OfflineRegionRequest_descriptor, + new java.lang.String[] { "Region", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class); + internal_static_OfflineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_OfflineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OfflineRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class); + internal_static_CreateTableRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_CreateTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CreateTableRequest_descriptor, + new java.lang.String[] { "TableSchema", "SplitKeys", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class); + internal_static_CreateTableResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_CreateTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CreateTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class); + internal_static_DeleteTableRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_DeleteTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class); + internal_static_DeleteTableResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + 
internal_static_DeleteTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeleteTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class); + internal_static_EnableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_EnableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EnableTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class); + internal_static_EnableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_EnableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EnableTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class); + internal_static_DisableTableRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_DisableTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DisableTableRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class); + internal_static_DisableTableResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_DisableTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DisableTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class); + internal_static_ModifyTableRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_ModifyTableRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyTableRequest_descriptor, + new java.lang.String[] { "TableName", "TableSchema", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class); + internal_static_ModifyTableResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_ModifyTableResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyTableResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class); internal_static_IsMasterRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(24); internal_static_IsMasterRunningRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_IsMasterRunningRequest_descriptor, @@ -6790,7 +16645,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class); internal_static_IsMasterRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(25); internal_static_IsMasterRunningResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsMasterRunningResponse_descriptor, @@ -6798,7 +16653,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class); internal_static_ShutdownRequest_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(26); internal_static_ShutdownRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownRequest_descriptor, @@ -6806,7 +16661,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class); internal_static_ShutdownResponse_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(27); internal_static_ShutdownResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ShutdownResponse_descriptor, @@ -6814,7 +16669,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class); internal_static_StopMasterRequest_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(28); internal_static_StopMasterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterRequest_descriptor, @@ -6822,7 +16677,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class); internal_static_StopMasterResponse_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(29); internal_static_StopMasterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopMasterResponse_descriptor, @@ -6830,7 +16685,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class); internal_static_BalanceRequest_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(30); internal_static_BalanceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceRequest_descriptor, @@ -6838,7 +16693,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.Builder.class); internal_static_BalanceResponse_descriptor = - getDescriptor().getMessageTypes().get(13); + 
getDescriptor().getMessageTypes().get(31); internal_static_BalanceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BalanceResponse_descriptor, @@ -6846,7 +16701,7 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.Builder.class); internal_static_SetBalancerRunningRequest_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(32); internal_static_SetBalancerRunningRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningRequest_descriptor, @@ -6854,13 +16709,45 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class); internal_static_SetBalancerRunningResponse_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(33); internal_static_SetBalancerRunningResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningResponse_descriptor, new java.lang.String[] { "PrevBalanceValue", }, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class); + internal_static_GetSchemaAlterStatusRequest_descriptor = + getDescriptor().getMessageTypes().get(34); + internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetSchemaAlterStatusRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class); + internal_static_GetSchemaAlterStatusResponse_descriptor = + getDescriptor().getMessageTypes().get(35); + internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetSchemaAlterStatusResponse_descriptor, + new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class); + internal_static_GetTableDescriptorsRequest_descriptor = + getDescriptor().getMessageTypes().get(36); + internal_static_GetTableDescriptorsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetTableDescriptorsRequest_descriptor, + new java.lang.String[] { "TableNames", }, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class); + internal_static_GetTableDescriptorsResponse_descriptor = + getDescriptor().getMessageTypes().get(37); + internal_static_GetTableDescriptorsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetTableDescriptorsResponse_descriptor, + new java.lang.String[] { "TableSchema", }, + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class); return null; } }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 4531f71..653866e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -1198,7 +1198,7 @@ public class HBaseFsck { // first time we assume the rs's supports #offline. try { LOG.info("Offlining region " + regionString); - admin.getMaster().offline(regionName); + admin.offline(regionName); } catch (IOException ioe) { String notFoundMsg = "java.lang.NoSuchMethodException: " + "org.apache.hadoop.hbase.master.HMaster.offline([B)"; diff --git a/hbase-server/src/main/protobuf/Master.proto b/hbase-server/src/main/protobuf/Master.proto index b0b9d91..1175daa 100644 --- a/hbase-server/src/main/protobuf/Master.proto +++ b/hbase-server/src/main/protobuf/Master.proto @@ -26,6 +26,32 @@ option optimize_for = SPEED; import "hbase.proto"; +/* Column-level protobufs */ + +message AddColumnRequest { + required bytes tableName = 1; + required ColumnFamilySchema columnFamilies = 2; +} + +message AddColumnResponse { +} + +message DeleteColumnRequest { + required bytes tableName = 1; + required bytes columnName = 2; +} + +message DeleteColumnResponse { +} + +message ModifyColumnRequest { + required bytes tableName = 1; + required ColumnFamilySchema columnFamilies = 2; +} + +message ModifyColumnResponse { +} + /* Region-level Protos */ message MoveRegionRequest { @@ -51,6 +77,52 @@ message UnassignRegionRequest { message UnassignRegionResponse { } +message OfflineRegionRequest { + required RegionSpecifier region = 1; +} + +message OfflineRegionResponse { +} + +/* Table-level protobufs */ + +message CreateTableRequest { + required TableSchema tableSchema = 1; + repeated bytes splitKeys = 2; +} + +message CreateTableResponse { +} + +message DeleteTableRequest { + required bytes tableName = 1; +} + +message DeleteTableResponse { +} + +message EnableTableRequest { + required bytes tableName = 1; +} + +message EnableTableResponse { +} + +message DisableTableRequest { + required bytes tableName = 1; +} + +message DisableTableResponse { +} + +message ModifyTableRequest { + required bytes tableName = 1; + required TableSchema tableSchema = 2; +} + +message ModifyTableResponse { +} + /* Cluster-level protobufs */ message IsMasterRunningRequest { @@ -88,7 +160,36 @@ message SetBalancerRunningResponse { optional bool prevBalanceValue = 1; } +message GetSchemaAlterStatusRequest { + required bytes tableName = 1; +} + +message GetSchemaAlterStatusResponse { + optional uint32 yetToUpdateRegions = 1; + optional uint32 totalRegions = 2; +} + +message GetTableDescriptorsRequest { + repeated string tableNames = 1; +} + +message GetTableDescriptorsResponse { + repeated TableSchema tableSchema = 1; +} + service MasterService { + /** Adds a column to the specified table. */ + rpc addColumn(AddColumnRequest) + returns(AddColumnResponse); + + /** Deletes a column from the specified table. Table must be disabled. */ + rpc deleteColumn(DeleteColumnRequest) + returns(DeleteColumnResponse); + + /** Modifies an existing column on the specified table. 
*/ + rpc modifyColumn(ModifyColumnRequest) + returns(ModifyColumnResponse); + /** Move the region to the destination server. */ rpc moveRegion(MoveRegionRequest) returns(MoveRegionResponse); @@ -106,6 +207,35 @@ service MasterService { rpc unassignRegion(UnassignRegionRequest) returns(UnassignRegionResponse); + /** + * Offline a region from the assignment manager's in-memory state. The + * region should be in a closed state and there will be no attempt to + * automatically reassign the region as in unassign. This is a special + * method, and should only be used by experts or hbck. + */ + rpc offlineRegion(OfflineRegionRequest) + returns(OfflineRegionResponse); + + /** Deletes a table */ + rpc deleteTable(DeleteTableRequest) + returns(DeleteTableResponse); + + /** Puts the table on-line (only needed if table has been previously taken offline) */ + rpc enableTable(EnableTableRequest) + returns(EnableTableResponse); + + /** Take table offline */ + rpc disableTable(DisableTableRequest) + returns(DisableTableResponse); + + /** Modify a table's metadata */ + rpc modifyTable(ModifyTableRequest) + returns(ModifyTableResponse); + + /** Creates a new table asynchronously */ + rpc createTable(CreateTableRequest) + returns(CreateTableResponse); + /** return true if master is available */ rpc isMasterRunning(IsMasterRunningRequest) returns(IsMasterRunningResponse); @@ -132,4 +262,12 @@ service MasterService { */ rpc setBalancerRunning(SetBalancerRunningRequest) returns(SetBalancerRunningResponse); -} \ No newline at end of file + + /** Used by the client to get the number of regions that have received the updated schema */ + rpc getSchemaAlterStatus(GetSchemaAlterStatusRequest) + returns(GetSchemaAlterStatusResponse); + + /** Get list of TableDescriptors for requested tables. */ + rpc getTableDescriptors(GetTableDescriptorsRequest) + returns(GetTableDescriptorsResponse); +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java index 068c09c..39e0d17 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java @@ -196,7 +196,7 @@ public class TestHBaseFsck { HRegionInfo hri) throws IOException, InterruptedException { try { HBaseFsckRepair.closeRegionSilentlyAndWait(admin, sn, hri); - admin.getMaster().offline(hri.getRegionName()); + admin.offline(hri.getRegionName()); } catch (IOException ioe) { LOG.warn("Got exception when attempting to offline region " + Bytes.toString(hri.getRegionName()), ioe);
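
For reviewers, a minimal sketch of how a caller could drive the new table-admin RPCs once this patch is applied. Only the MasterService.BlockingInterface methods and the generated request/response builders shown in the diff above are taken from the patch; the MasterRpcSketch class, its two helper methods, and the way the stub is obtained are illustrative assumptions, not code from this change.

import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;

// Hypothetical helper class; "stub" is assumed to come from
// MasterService.newBlockingStub(channel) over a connection to the active master.
public class MasterRpcSketch {

  // Disable then delete a table through the new pb RPCs. A null RpcController
  // is passed, as the blocking stubs tolerate a null controller.
  static void dropTable(MasterService.BlockingInterface stub, byte[] tableName)
      throws ServiceException {
    ByteString name = ByteString.copyFrom(tableName);
    // HBase requires a table to be disabled before it can be deleted.
    stub.disableTable(null, DisableTableRequest.newBuilder().setTableName(name).build());
    stub.deleteTable(null, DeleteTableRequest.newBuilder().setTableName(name).build());
  }

  // Poll the progress of an online schema change via getSchemaAlterStatus.
  static void reportAlterProgress(MasterService.BlockingInterface stub, byte[] tableName)
      throws ServiceException {
    GetSchemaAlterStatusResponse resp = stub.getSchemaAlterStatus(null,
        GetSchemaAlterStatusRequest.newBuilder()
            .setTableName(ByteString.copyFrom(tableName)).build());
    // Both fields are optional uint32 in Master.proto, so check presence first.
    if (resp.hasYetToUpdateRegions() && resp.hasTotalRegions()) {
      System.out.println((resp.getTotalRegions() - resp.getYetToUpdateRegions())
          + "/" + resp.getTotalRegions() + " regions have the new schema");
    }
  }
}

One consequence of the service layout worth noting: the new RPCs are inserted at the front and middle of MasterService rather than appended, so every pre-existing method's descriptor index shifts (moveRegion moves from getMethods().get(0) to get(3), setBalancerRunning from get(7) to get(16), and so on), which is why the renumbering touches so many otherwise unchanged stub methods in the generated code above.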