diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java index ca7ab6a..46c775c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java @@ -402,8 +402,9 @@ public class HRegionInfo implements Comparable { * Gets the table name from the specified region name. * @param regionName * @return Table name. + * @see #getTable(byte[]) */ - public static TableName getTableName(byte[] regionName) { + public static byte [] getTableName(byte[] regionName) { int offset = -1; for (int i = 0; i < regionName.length; i++) { if (regionName[i] == HConstants.DELIMITER) { @@ -413,7 +414,18 @@ public class HRegionInfo implements Comparable { } byte[] buff = new byte[offset]; System.arraycopy(regionName, 0, buff, 0, offset); - return TableName.valueOf(buff); + return buff; + } + + /** + * Gets the table name from the specified region name. + * Like {@link #getTableName(byte[])} only returns a {@link TableName} rather than a byte array. 
+ * @param regionName + * @return Table name + * @see #getTableName(byte[]) + */ + public static TableName getTable(final byte [] regionName) { + return TableName.valueOf(getTableName(regionName)); } /** @@ -523,12 +535,22 @@ public class HRegionInfo implements Comparable { /** * Get current table name of the region * @return byte array of table name + * @see #getTable() + */ + public byte [] getTableName() { + return getTable().toBytes(); + } + + /** + * Get current table name of the region + * @return TableName + * @see #getTableName() */ - public TableName getTableName() { + public TableName getTable() { if (tableName == null || tableName.getName().length == 0) { - tableName = getTableName(getRegionName()); + tableName = getTable(getRegionName()); } - return tableName; + return this.tableName; } /** @@ -813,7 +835,7 @@ public class HRegionInfo implements Comparable { public static RegionInfo convert(final HRegionInfo info) { if (info == null) return null; RegionInfo.Builder builder = RegionInfo.newBuilder(); - builder.setTableName(ProtobufUtil.toProtoTableName(info.getTableName())); + builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable())); builder.setRegionId(info.getRegionId()); if (info.getStartKey() != null) { builder.setStartKey(ByteString.copyFrom(info.getStartKey())); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index 0e0e189..3d7683e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -546,6 +546,16 @@ public class HTableDescriptor implements WritableComparable { } /** + * Remove metadata represented by the key from the {@link #values} map + * + * @param key Key whose key and value we're to remove from HTableDescriptor + * parameters. 
+ */ + public void remove(final byte [] key) { + remove(new ImmutableBytesWritable(key)); + } + + /** * Check if the readOnly flag of the table is set. If the readOnly flag is * set then the contents of the table can only be read from but not modified. * diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 9bbe8a9..5ee2607 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -85,6 +85,15 @@ public class ClientScanner extends AbstractClientScanner { this(conf, scan, tableName, HConnectionManager.getConnection(conf)); } + /** + * @deprecated Use {@link #ClientScanner(Configuration, Scan, TableName)} + */ + @Deprecated + public ClientScanner(final Configuration conf, final Scan scan, + final byte [] tableName) throws IOException { + this(conf, scan, TableName.valueOf(tableName)); + } + /** * Create a new ClientScanner for the specified table @@ -102,6 +111,15 @@ public class ClientScanner extends AbstractClientScanner { } /** + * @deprecated Use {@link #ClientScanner(Configuration, Scan, TableName, HConnection)} + */ + @Deprecated + public ClientScanner(final Configuration conf, final Scan scan, final byte [] tableName, + HConnection connection) throws IOException { + this(conf, scan, TableName.valueOf(tableName), connection, new RpcRetryingCallerFactory(conf)); + } + + /** * Create a new ClientScanner for the specified table Note that the passed {@link Scan}'s start * row maybe changed changed. * @param conf The {@link Configuration} to use. 
@@ -158,7 +176,11 @@ public class ClientScanner extends AbstractClientScanner { return this.connection; } - protected TableName getTableName() { + protected byte [] getTableName() { + return this.tableName.getName(); + } + + protected TableName getTable() { return this.tableName; } @@ -253,7 +275,7 @@ public class ClientScanner extends AbstractClientScanner { int nbRows) { scan.setStartRow(localStartKey); ScannerCallable s = new ScannerCallable(getConnection(), - getTableName(), scan, this.scanMetrics); + getTable(), scan, this.scanMetrics); s.setCaching(nbRows); return s; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index 3c6c20c..914e5a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -1007,6 +1007,18 @@ public class HTable implements HTableInterface { } /** + * @deprecated Use {@link #incrementColumnValue(byte[], byte[], byte[], long, Durability)} + */ + @Deprecated + @Override + public long incrementColumnValue(final byte [] row, final byte [] family, + final byte [] qualifier, final long amount, final boolean writeToWAL) + throws IOException { + return incrementColumnValue(row, family, qualifier, amount, + writeToWAL? 
Durability.USE_DEFAULT: Durability.SKIP_WAL); + } + + /** * {@inheritDoc} */ @Override diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java index eee13b0..2d96ce0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.client; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -385,6 +386,14 @@ public interface HTableInterface extends Closeable { long amount, Durability durability) throws IOException; /** + * @deprecated Use {@link #incrementColumnValue(byte[], byte[], byte[], long, Durability)} + */ + @Deprecated + long incrementColumnValue(final byte [] row, final byte [] family, + final byte [] qualifier, final long amount, final boolean writeToWAL) + throws IOException; + + /** * Tells whether or not 'auto-flush' is turned on. 
* * @return {@code true} if 'auto-flush' is enabled (default), meaning diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 0eb3cfd..7194cf9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -102,6 +102,15 @@ public class ScannerCallable extends RegionServerCallable { } /** + * @deprecated Use {@link #ScannerCallable(HConnection, TableName, Scan, ScanMetrics)} + */ + @Deprecated + public ScannerCallable (HConnection connection, final byte [] tableName, Scan scan, + ScanMetrics scanMetrics) { + this(connection, TableName.valueOf(tableName), scan, scanMetrics); + } + + /** * @param reload force reload of server location * @throws IOException */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index 076218a..ec48858 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -518,6 +518,16 @@ public class LoadIncrementalHFiles extends Configured implements Tool { } /** + * @deprecated Use {@link #tryAtomicRegionLoad(HConnection, TableName, byte[], Collection)} + */ + @Deprecated + protected List tryAtomicRegionLoad(final HConnection conn, + final byte [] tableName, final byte[] first, Collection lqis) + throws IOException { + return tryAtomicRegionLoad(conn, TableName.valueOf(tableName), first, lqis); + } + + /** * Attempts to do an atomic load of many hfiles into a region. If it fails, * it returns a list of hfiles that need to be retried. If it is successful * it will return an empty list. 
@@ -531,9 +541,8 @@ public class LoadIncrementalHFiles extends Configured implements Tool { * failure */ protected List tryAtomicRegionLoad(final HConnection conn, - final TableName tableName, - final byte[] first, Collection lqis) throws IOException { - + final TableName tableName, final byte[] first, Collection lqis) + throws IOException { final List> famPaths = new ArrayList>(lqis.size()); for (LoadQueueItem lqi : lqis) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java index 5d11ed9..5bcada3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java @@ -85,11 +85,20 @@ implements Writable, Comparable { /** Default constructor. */ public TableSplit() { - this(null, null, HConstants.EMPTY_BYTE_ARRAY, + this((TableName)null, null, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, ""); } /** + * @deprecated Use {@link TableSplit#TableSplit(TableName, byte[], byte[], String)} + */ + @Deprecated + public TableSplit(final byte [] tableName, Scan scan, byte [] startRow, byte [] endRow, + final String location) { + this(TableName.valueOf(tableName), scan, startRow, endRow, location); + } + + /** * Creates a new instance while assigning all variables. * * @param tableName The name of the current table. @@ -111,7 +120,16 @@ implements Writable, Comparable { this.endRow = endRow; this.regionLocation = location; } - + + /** + * @deprecated Use {@link TableSplit#TableSplit(TableName, byte[], byte[], String)} + */ + @Deprecated + public TableSplit(final byte [] tableName, byte[] startRow, byte[] endRow, + final String location) { + this(TableName.valueOf(tableName), startRow, endRow, location); + } + /** * Creates a new instance without a scanner. 
* @@ -136,11 +154,20 @@ implements Writable, Comparable { } /** + * Returns the table name converted to a byte array. + * @see #getTable() + * @return The table name. + */ + public byte [] getTableName() { + return tableName.getName(); + } + + /** * Returns the table name. * * @return The table name. */ - public TableName getTableName() { + public TableName getTable() { return tableName; } @@ -268,7 +295,7 @@ implements Writable, Comparable { // If The table name of the two splits is the same then compare start row // otherwise compare based on table names int tableNameComparison = - getTableName().compareTo(split.getTableName()); + getTable().compareTo(split.getTable()); return tableNameComparison != 0 ? tableNameComparison : Bytes.compareTo( getStartRow(), split.getStartRow()); }