diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index c01e722..75c060e 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -198,7 +199,7 @@ public class MetaTableAccessor { * @param g Get to run * @throws IOException */ - private static Result get(final HTable t, final Get g) throws IOException { + private static Result get(final HTableInterface t, final Get g) throws IOException { try { return t.get(g); } finally { @@ -597,7 +598,7 @@ public class MetaTableAccessor { scan.setCaching(caching); } scan.addFamily(HConstants.CATALOG_FAMILY); - HTable metaTable = getMetaHTable(hConnection); + HTableInterface metaTable = getMetaHTable(hConnection); ResultScanner scanner = null; try { scanner = metaTable.getScanner(scan); @@ -946,7 +947,7 @@ public class MetaTableAccessor { * @param p put to make * @throws IOException */ - private static void put(final HTable t, final Put p) throws IOException { + private static void put(final HTableInterface t, final Put p) throws IOException { try { t.put(p); } finally { @@ -962,7 +963,7 @@ public class MetaTableAccessor { */ public static void putsToMetaTable(final HConnection hConnection, final List ps) throws IOException { - HTable t = getMetaHTable(hConnection); + HTableInterface t = getMetaHTable(hConnection); try { t.put(ps); } finally { @@ -991,7 +992,7 @@ public class MetaTableAccessor { */ public static 
void deleteFromMetaTable(final HConnection hConnection, final List deletes) throws IOException { - HTable t = getMetaHTable(hConnection); + HTableInterface t = getMetaHTable(hConnection); try { t.delete(deletes); } finally { @@ -1034,7 +1035,7 @@ public class MetaTableAccessor { public static void mutateMetaTable(final HConnection hConnection, final List mutations) throws IOException { - HTable t = getMetaHTable(hConnection); + HTableInterface t = getMetaHTable(hConnection); try { t.batch(mutations); } catch (InterruptedException e) { @@ -1066,7 +1067,7 @@ public class MetaTableAccessor { * @param regionInfo region information * @throws IOException if problem connecting or updating meta */ - public static void addRegionToMeta(HTable meta, HRegionInfo regionInfo) throws IOException { + public static void addRegionToMeta(HTableInterface meta, HRegionInfo regionInfo) throws IOException { addRegionToMeta(meta, regionInfo, null, null); } @@ -1083,7 +1084,7 @@ public class MetaTableAccessor { * @param splitB second split daughter of the parent regionInfo * @throws IOException if problem connecting or updating meta */ - public static void addRegionToMeta(HTable meta, HRegionInfo regionInfo, + public static void addRegionToMeta(HTableInterface meta, HRegionInfo regionInfo, HRegionInfo splitA, HRegionInfo splitB) throws IOException { Put put = makePutFromRegionInfo(regionInfo); addDaughtersToPut(put, splitA, splitB); @@ -1107,7 +1108,7 @@ public class MetaTableAccessor { */ public static void addRegionToMeta(HConnection hConnection, HRegionInfo regionInfo, HRegionInfo splitA, HRegionInfo splitB) throws IOException { - HTable meta = getMetaHTable(hConnection); + HTableInterface meta = getMetaHTable(hConnection); try { addRegionToMeta(meta, regionInfo, splitA, splitB); } finally { @@ -1166,7 +1167,7 @@ public class MetaTableAccessor { */ public static void mergeRegions(final HConnection hConnection, HRegionInfo mergedRegion, HRegionInfo regionA, HRegionInfo regionB, 
ServerName sn) throws IOException { - HTable meta = getMetaHTable(hConnection); + HTableInterface meta = getMetaHTable(hConnection); try { HRegionInfo copyOfMerged = new HRegionInfo(mergedRegion); @@ -1206,7 +1207,7 @@ public class MetaTableAccessor { public static void splitRegion(final HConnection hConnection, HRegionInfo parent, HRegionInfo splitA, HRegionInfo splitB, ServerName sn) throws IOException { - HTable meta = getMetaHTable(hConnection); + HTableInterface meta = getMetaHTable(hConnection); try { HRegionInfo copyOfParent = new HRegionInfo(parent); copyOfParent.setOffline(true); @@ -1233,7 +1234,7 @@ public class MetaTableAccessor { /** * Performs an atomic multi-Mutate operation against the given table. */ - private static void multiMutate(HTable table, byte[] row, Mutation... mutations) + private static void multiMutate(HTableInterface table, byte[] row, Mutation... mutations) throws IOException { CoprocessorRpcChannel channel = table.coprocessorService(row); MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index 1265a5f..6663689 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -1778,7 +1778,7 @@ public class HTable implements HTableInterface { * @throws IOException */ public static void main(String[] args) throws IOException { - HTable t = new HTable(HBaseConfiguration.create(), args[0]); + HTableInterface t = new HTable(HBaseConfiguration.create(), args[0]); try { System.out.println(t.get(new Get(Bytes.toBytes(args[1])))); } finally { diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java index 9d378ae..91f5c7a 100644 --- 
hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java @@ -212,7 +212,7 @@ public class HTableMultiplexer { } private synchronized LinkedBlockingQueue addNewRegionServer( - HRegionLocation addr, HTable htable) { + HRegionLocation addr, HTableInterface htable) { LinkedBlockingQueue queue = serverToBufferQueueMap.get(addr); if (queue == null) { @@ -413,11 +413,11 @@ public class HTableMultiplexer { private AtomicInteger currentProcessingPutCount; private AtomicAverageCounter averageLatency; private AtomicLong maxLatency; - private HTable htable; // For Multi + private HTableInterface htable; // For Multi public HTableFlushWorker(Configuration conf, HRegionLocation addr, HTableMultiplexer htableMultiplexer, - LinkedBlockingQueue queue, HTable htable) { + LinkedBlockingQueue queue, HTableInterface htable) { this.addr = addr; this.conf = conf; this.htableMultiplexer = htableMultiplexer; diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java index 062779a..88c0d84 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java @@ -140,7 +140,7 @@ public class MetaScanner { // Calculate startrow for scan. 
byte[] startRow; ResultScanner scanner = null; - HTable metaTable = null; + HTableInterface metaTable = null; try { metaTable = new HTable(TableName.META_TABLE_NAME, connection, null); if (row != null) { diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 3b37238..9f66722 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -101,7 +102,7 @@ public class AggregationClient { public R max( final TableName tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return max(table, ci, scan); @@ -125,7 +126,7 @@ public class AggregationClient { * & propagated to it. 
*/ public - R max(final HTable table, final ColumnInterpreter ci, + R max(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); class MaxCallBack implements Batch.Callback { @@ -196,7 +197,7 @@ public class AggregationClient { public R min( final TableName tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return min(table, ci, scan); @@ -218,7 +219,7 @@ public class AggregationClient { * @throws Throwable */ public - R min(final HTable table, final ColumnInterpreter ci, + R min(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); class MinCallBack implements Batch.Callback { @@ -276,7 +277,7 @@ public class AggregationClient { public long rowCount( final TableName tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return rowCount(table, ci, scan); @@ -301,7 +302,7 @@ public class AggregationClient { * @throws Throwable */ public - long rowCount(final HTable table, + long rowCount(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, true); class RowNumCallback implements Batch.Callback { @@ -350,7 +351,7 @@ public class AggregationClient { public S sum( final TableName tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return sum(table, ci, scan); @@ -371,7 +372,7 @@ public class AggregationClient { * @throws Throwable */ public - S sum(final HTable table, final ColumnInterpreter 
ci, + S sum(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); @@ -423,7 +424,7 @@ public class AggregationClient { private Pair getAvgArgs( final TableName tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return getAvgArgs(table, ci, scan); @@ -443,7 +444,7 @@ public class AggregationClient { * @throws Throwable */ private - Pair getAvgArgs(final HTable table, + Pair getAvgArgs(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); class AvgCallBack implements Batch.Callback> { @@ -523,7 +524,7 @@ public class AggregationClient { * @throws Throwable */ public double avg( - final HTable table, final ColumnInterpreter ci, Scan scan) throws Throwable { + final HTableInterface table, final ColumnInterpreter ci, Scan scan) throws Throwable { Pair p = getAvgArgs(table, ci, scan); return ci.divideForAvg(p.getFirst(), p.getSecond()); } @@ -540,7 +541,7 @@ public class AggregationClient { * @throws Throwable */ private - Pair, Long> getStdArgs(final HTable table, + Pair, Long> getStdArgs(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); class StdCallback implements Batch.Callback, Long>> { @@ -614,7 +615,7 @@ public class AggregationClient { public double std(final TableName tableName, ColumnInterpreter ci, Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return std(table, ci, scan); @@ -638,7 +639,7 @@ public class AggregationClient { * @throws Throwable */ public double std( - final HTable table, ColumnInterpreter ci, Scan scan) 
throws Throwable { + final HTableInterface table, ColumnInterpreter ci, Scan scan) throws Throwable { Pair, Long> p = getStdArgs(table, ci, scan); double res = 0d; double avg = ci.divideForAvg(p.getFirst().get(0), p.getSecond()); @@ -662,7 +663,7 @@ public class AggregationClient { */ private Pair>, List> - getMedianArgs(final HTable table, + getMedianArgs(final HTableInterface table, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false); final NavigableMap> map = @@ -727,7 +728,7 @@ public class AggregationClient { public R median(final TableName tableName, ColumnInterpreter ci, Scan scan) throws Throwable { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, tableName); return median(table, ci, scan); @@ -749,7 +750,7 @@ public class AggregationClient { * @throws Throwable */ public - R median(final HTable table, ColumnInterpreter ci, + R median(final HTableInterface table, ColumnInterpreter ci, Scan scan) throws Throwable { Pair>, List> p = getMedianArgs(table, ci, scan); byte[] startRow = null; diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java index 48986b1..a19d211 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client.coprocessor; import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW; import static org.apache.hadoop.hbase.HConstants.LAST_ROW; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.classification.InterfaceAudience; @@ -47,9 +48,9 @@ import java.util.List; */ @InterfaceAudience.Private 
public class SecureBulkLoadClient { - private HTable table; + private HTableInterface table; - public SecureBulkLoadClient(HTable table) { + public SecureBulkLoadClient(HTableInterface table) { this.table = table; } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java index 35c1412..410665a 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; @@ -70,7 +71,7 @@ public class AccessControlClient { public static GrantResponse grant(Configuration conf, final TableName tableName, final String userName, final byte[] family, final byte[] qual, final AccessControlProtos.Permission.Action... actions) throws Throwable { - HTable ht = null; + HTableInterface ht = null; try { TableName aclTableName = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl"); @@ -150,7 +151,7 @@ public class AccessControlClient { public static RevokeResponse revoke(Configuration conf, final String username, final TableName tableName, final byte[] family, final byte[] qualifier, final AccessControlProtos.Permission.Action... 
actions) throws Throwable { - HTable ht = null; + HTableInterface ht = null; try { TableName aclTableName = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl"); @@ -211,7 +212,7 @@ public class AccessControlClient { public static List getUserPermissions(Configuration conf, String tableRegex) throws Throwable { List permList = new ArrayList(); - HTable ht = null; + HTableInterface ht = null; HBaseAdmin ha = null; try { TableName aclTableName = diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java index 8a17994..184138e 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LA import java.io.IOException; import java.util.Map; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -72,7 +73,7 @@ public class VisibilityClient { */ public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels) throws Throwable { - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(conf, LABELS_TABLE_NAME.getName()); Batch.Call callable = @@ -126,7 +127,7 @@ public class VisibilityClient { * @throws Throwable */ public static GetAuthsResponse getAuths(Configuration conf, final String user) throws Throwable { - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(conf, LABELS_TABLE_NAME.getName()); Batch.Call callable = @@ -168,7 +169,7 @@ public class VisibilityClient { private static VisibilityLabelsResponse setOrClearAuths(Configuration conf, final String[] 
auths, final String user, final boolean setOrClear) throws IOException, ServiceException, Throwable { - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(conf, LABELS_TABLE_NAME.getName()); Batch.Call callable = diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index 9e878b4..4b90561 100644 --- hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -150,7 +150,7 @@ public class TestClientNoCluster extends Configured implements Tool { Configuration localConfig = HBaseConfiguration.create(this.conf); // This override mocks up our exists/get call to throw a RegionServerStoppedException. localConfig.set("hbase.client.connection.impl", RpcTimeoutConnection.class.getName()); - HTable table = new HTable(localConfig, TableName.META_TABLE_NAME); + HTableInterface table = new HTable(localConfig, TableName.META_TABLE_NAME); Throwable t = null; LOG.info("Start"); try { @@ -187,7 +187,7 @@ public class TestClientNoCluster extends Configured implements Tool { // and it has expired. Otherwise, if this functionality is broke, all retries will be run -- // all ten of them -- and we'll get the RetriesExhaustedException exception. localConfig.setInt(HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, pause - 1); - HTable table = new HTable(localConfig, TableName.META_TABLE_NAME); + HTableInterface table = new HTable(localConfig, TableName.META_TABLE_NAME); Throwable t = null; try { // An exists call turns into a get w/ a flag. @@ -219,7 +219,7 @@ public class TestClientNoCluster extends Configured implements Tool { // Go against meta else we will try to find first region for the table on construction which // means we'll have to do a bunch more mocking. Tests that go against meta only should be // good for a bit of testing. 
- HTable table = new HTable(this.conf, TableName.META_TABLE_NAME); + HTableInterface table = new HTable(this.conf, TableName.META_TABLE_NAME); ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY); try { Result result = null; @@ -239,7 +239,7 @@ public class TestClientNoCluster extends Configured implements Tool { // Go against meta else we will try to find first region for the table on construction which // means we'll have to do a bunch more mocking. Tests that go against meta only should be // good for a bit of testing. - HTable table = new HTable(this.conf, TableName.META_TABLE_NAME); + HTableInterface table = new HTable(this.conf, TableName.META_TABLE_NAME); ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY); try { Result result = null; diff --git hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java index 8dbb16c..8953565 100644 --- hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java +++ hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -78,7 +79,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteEndpoint() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteEndpoint"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { byte[] rowkey = Bytes.toBytes(j); @@ 
-102,7 +103,7 @@ public class TestBulkDeleteProtocol { throws Throwable { byte[] tableName = Bytes .toBytes("testBulkDeleteEndpointWhenRowBatchSizeLessThanRowsToDeleteFromARegion"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { byte[] rowkey = Bytes.toBytes(j); @@ -123,7 +124,7 @@ public class TestBulkDeleteProtocol { private long invokeBulkDeleteProtocol(byte[] tableName, final Scan scan, final int rowBatchSize, final DeleteType deleteType, final Long timeStamp) throws Throwable { - HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface ht = new HTable(TEST_UTIL.getConfiguration(), tableName); long noOfDeletedRows = 0L; Batch.Call callable = new Batch.Call() { @@ -155,7 +156,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteWithConditionBasedDelete() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteWithConditionBasedDelete"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { byte[] rowkey = Bytes.toBytes(j); @@ -185,7 +186,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteColumn() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteColumn"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { byte[] rowkey = Bytes.toBytes(j); @@ -218,7 +219,7 @@ public class TestBulkDeleteProtocol { htd.addFamily(new HColumnDescriptor(FAMILY1)); htd.addFamily(new HColumnDescriptor(FAMILY2)); TEST_UTIL.getHBaseAdmin().createTable(htd, Bytes.toBytes(0), Bytes.toBytes(120), 5); - HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface ht = new HTable(TEST_UTIL.getConfiguration(), tableName); List puts = new ArrayList(100); for (int j 
= 0; j < 100; j++) { Put put = new Put(Bytes.toBytes(j)); @@ -245,7 +246,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteColumnVersion() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersion"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { Put put = new Put(Bytes.toBytes(j)); @@ -293,7 +294,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteColumnVersionBasedOnTS() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersionBasedOnTS"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { Put put = new Put(Bytes.toBytes(j)); @@ -340,7 +341,7 @@ public class TestBulkDeleteProtocol { // @Ignore @Test public void testBulkDeleteWithNumberOfVersions() throws Throwable { byte[] tableName = Bytes.toBytes("testBulkDeleteWithNumberOfVersions"); - HTable ht = createTable(tableName); + HTableInterface ht = createTable(tableName); List puts = new ArrayList(100); for (int j = 0; j < 100; j++) { Put put = new Put(Bytes.toBytes(j)); @@ -422,13 +423,13 @@ public class TestBulkDeleteProtocol { ht.close(); } - private HTable createTable(byte[] tableName) throws IOException { + private HTableInterface createTable(byte[] tableName) throws IOException { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); HColumnDescriptor hcd = new HColumnDescriptor(FAMILY1); hcd.setMaxVersions(10);// Just setting 10 as I am not testing with more than 10 versions here htd.addFamily(hcd); TEST_UTIL.getHBaseAdmin().createTable(htd, Bytes.toBytes(0), Bytes.toBytes(120), 5); - HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface ht = new HTable(TEST_UTIL.getConfiguration(), tableName); return ht; } diff --git 
hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java index fb04b4d..92ab4de 100644 --- hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java +++ hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java @@ -22,6 +22,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -72,7 +73,7 @@ public class TestRowCountEndpoint { // @Ignore @Test public void testEndpoint() throws Throwable { - HTable table = new HTable(CONF, TEST_TABLE); + HTableInterface table = new HTable(CONF, TEST_TABLE); // insert some test rows for (int i=0; i<5; i++) { diff --git hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java index 4ab53c7..3e50a01 100644 --- hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java +++ hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -74,7 +75,7 @@ public class TestZooKeeperScanPolicyObserver { .setTimeToLive(1); desc.addFamily(hcd); TEST_UTIL.getHBaseAdmin().createTable(desc); - HTable t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName); + HTableInterface t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName); long now = EnvironmentEdgeManager.currentTimeMillis(); ZooKeeperWatcher zkw = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(), "test", null); diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java index 377bbdd..8a29540 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java @@ -28,6 +28,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -222,7 +223,7 @@ public class IntegrationTestLazyCfLoading { long maxRuntime = conf.getLong(timeoutKey, DEFAULT_TIMEOUT_MINUTES); long serverCount = util.getHBaseClusterInterface().getClusterStatus().getServersSize(); long keysToWrite = serverCount * KEYS_TO_WRITE_PER_SERVER; - HTable table = new HTable(conf, TABLE_NAME); + HTableInterface table = new HTable(conf, TABLE_NAME); // Create multi-threaded writer and start it. We write multiple columns/CFs and verify // their integrity, therefore multi-put is necessary. 
diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java index b6c1f2e..86da9cb 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRsHoldingTableAction.java @@ -25,6 +25,7 @@ import org.apache.commons.lang.math.RandomUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; /** * Action that restarts an HRegionServer holding one of the regions of the table. diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index 1e2203a..d2611d0 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Consistency; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index 766c66f..89c693e 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -46,6 +46,7 @@ 
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; @@ -153,7 +154,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool { assertEquals("Loading HFiles failed.", 0, ToolRunner.run(new LoadIncrementalHFiles(new Configuration(getConf())), args)); - HTable table = null; + HTableInterface table = null; Scan scan = new Scan() {{ setCacheBlocks(false); setCaching(1000); diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index 31c67af..790eecf 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction; import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -460,7 +461,7 @@ public class IntegrationTestMTTR { */ static class PutCallable extends TimingCallable { - private final HTable table; + private final HTableInterface table; public PutCallable(Future f) throws IOException { super(f); @@ -487,7 +488,7 @@ public class IntegrationTestMTTR { * supplied future returns. Returns the max time taken to scan. 
*/ static class ScanCallable extends TimingCallable { - private final HTable table; + private final HTableInterface table; public ScanCallable(Future f) throws IOException { super(f); diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index d670a5e..21c69ea 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -336,7 +337,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { byte[] id; long count = 0; int i; - HTable table; + HTableInterface table; long numNodes; long wrap; int width; @@ -873,7 +874,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { System.exit(-1); } - HTable table = new HTable(getConf(), getTableName(getConf())); + HTableInterface table = new HTable(getConf(), getTableName(getConf())); Scan scan = new Scan(); scan.setBatch(10000); @@ -923,7 +924,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { org.apache.hadoop.hbase.client.Delete delete = new org.apache.hadoop.hbase.client.Delete(val); - HTable table = new HTable(getConf(), getTableName(getConf())); + HTableInterface table = new HTable(getConf(), getTableName(getConf())); table.delete(delete); table.flushCommits(); @@ -969,7 +970,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { byte[] startKey = isSpecificStart ? 
Bytes.toBytesBinary(cmd.getOptionValue('s')) : null; int logEvery = cmd.hasOption('l') ? Integer.parseInt(cmd.getOptionValue('l')) : 1; - HTable table = new HTable(getConf(), getTableName(getConf())); + HTableInterface table = new HTable(getConf(), getTableName(getConf())); long numQueries = 0; // If isSpecificStart is set, only walk one list from that particular node. // Note that in case of circular (or P-shaped) list it will walk forever, as is @@ -1005,7 +1006,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { return 0; } - private static CINode findStartNode(HTable table, byte[] startKey) throws IOException { + private static CINode findStartNode(HTableInterface table, byte[] startKey) throws IOException { Scan scan = new Scan(); scan.setStartRow(startKey); scan.setBatch(1); @@ -1028,7 +1029,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { return null; } - private CINode getNode(byte[] row, HTable table, CINode node) throws IOException { + private CINode getNode(byte[] row, HTableInterface table, CINode node) throws IOException { Get get = new Get(row); get.addColumn(FAMILY_NAME, COLUMN_PREV); Result result = table.get(get); diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java index 9748b31..ec8fc5e 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import 
org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -170,8 +171,8 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB } static class VisibilityGeneratorMapper extends GeneratorMapper { - HTable[] tables = new HTable[DEFAULT_TABLES_COUNT]; - HTable commonTable = null; + HTableInterface[] tables = new HTableInterface[DEFAULT_TABLES_COUNT]; + HTableInterface commonTable = null; @Override protected void setup(org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, @@ -182,7 +183,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB @Override protected void instantiateHTable(Configuration conf) throws IOException { for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) { - HTable table = new HTable(conf, getTableName(i)); + HTableInterface table = new HTable(conf, getTableName(i)); table.setAutoFlush(true, true); //table.setWriteBufferSize(4 * 1024 * 1024); this.tables[i] = table; diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java index 322dd81..661c4ff 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -163,7 +164,7 @@ public void cleanUpCluster() throws Exception { extends Mapper { protected long recordsToWrite; - protected HTable table; + protected HTableInterface table; protected Configuration conf; protected int
numBackReferencesPerRow; protected String shortTaskId; diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java index 86ccff3..f06a7bb 100644 --- hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java +++ hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -128,7 +129,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool { ResultScanner rs = null; try { innerScope = Trace.startSpan("Scan", Sampler.ALWAYS); - HTable ht = new HTable(util.getConfiguration(), tableName); + HTableInterface ht = new HTable(util.getConfiguration(), tableName); Scan s = new Scan(); s.setStartRow(Bytes.toBytes(rowKeyQueue.take())); s.setBatch(7); @@ -176,7 +177,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool { public void run() { - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(util.getConfiguration(), tableName); } catch (IOException e) { @@ -238,7 +239,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool { private LinkedBlockingQueue insertData() throws IOException, InterruptedException { LinkedBlockingQueue rowKeys = new LinkedBlockingQueue(25000); - HTable ht = new HTable(util.getConfiguration(), this.tableName); + HTableInterface ht = new HTable(util.getConfiguration(), this.tableName); byte[] value = new byte[300]; for (int x = 0; x < 5000; x++) { TraceScope traceScope = 
Trace.startSpan("insertData", Sampler.ALWAYS); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java index 057c493..b690d52 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java @@ -70,7 +70,7 @@ import com.google.protobuf.ServiceException; public class HTableWrapper implements HTableInterface { private TableName tableName; - private HTable table; + private HTableInterface table; private ClusterConnection connection; private final List openTables; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java index 327e404..127eaf8 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java @@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.fs.FileAlreadyExistsException; @@ -56,14 +57,14 @@ FileOutputFormat { */ protected static class TableRecordWriter implements RecordWriter { - private HTable m_table; + private HTableInterface m_table; /** * Instantiate a TableRecordWriter with the HBase HClient for writing. 
* * @param table */ - public TableRecordWriter(HTable table) { + public TableRecordWriter(HTableInterface table) { m_table = table; } @@ -86,7 +87,7 @@ FileOutputFormat { // expecting exactly one path String tableName = job.get(OUTPUT_TABLE); - HTable table = null; + HTableInterface table = null; try { table = new HTable(HBaseConfiguration.create(job), tableName); } catch(IOException e) { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java index 7713180..1922ec9 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java @@ -23,6 +23,7 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -62,7 +63,7 @@ implements RecordReader { /** * @param htable the {@link HTable} to scan. 
*/ - public void setHTable(HTable htable) { + public void setHTable(HTableInterface htable) { this.recordReaderImpl.setHTable(htable); } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java index f4043f4..49abb40 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java @@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -52,7 +53,7 @@ public class TableRecordReaderImpl { private byte [] lastSuccessfulRow; private Filter trrRowFilter; private ResultScanner scanner; - private HTable htable; + private HTableInterface htable; private byte [][] trrInputColumns; private long timestamp; private int rowcount; @@ -116,7 +117,7 @@ public class TableRecordReaderImpl { /** * @param htable the {@link HTable} to scan. 
*/ - public void setHTable(HTable htable) { + public void setHTable(HTableInterface htable) { Configuration conf = htable.getConfiguration(); logScannerActivity = conf.getBoolean( ScannerCallable.LOG_SCANNER_ACTIVITY, false); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java index df063a4..f1b6b66 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java @@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -160,7 +161,7 @@ public class HFileOutputFormat extends FileOutputFormat(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index df8a840..8c765f4 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.RegionServerCallable; import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; import 
org.apache.hadoop.hbase.client.coprocessor.SecureBulkLoadClient; @@ -350,7 +351,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { * them. Any failures are re-queued for another pass with the * groupOrSplitPhase. */ - protected void bulkLoadPhase(final HTable table, final HConnection conn, + protected void bulkLoadPhase(final HTableInterface table, final HConnection conn, ExecutorService pool, Deque queue, final Multimap regionGroups) throws IOException { // atomically bulk load the groups. @@ -422,7 +423,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { * @return A Multimap that groups LQI by likely * bulk load region targets. */ - private Multimap groupOrSplitPhase(final HTable table, + private Multimap groupOrSplitPhase(final HTableInterface table, ExecutorService pool, Deque queue, final Pair startEndKeys) throws IOException { // need synchronized only within this scope of this @@ -473,7 +474,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { } protected List splitStoreFile(final LoadQueueItem item, - final HTable table, byte[] startKey, + final HTableInterface table, byte[] startKey, byte[] splitKey) throws IOException { final Path hfilePath = item.hfilePath; @@ -515,7 +516,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { * @throws IOException */ protected List groupOrSplit(Multimap regionGroups, - final LoadQueueItem item, final HTable table, + final LoadQueueItem item, final HTableInterface table, final Pair startEndKeys) throws IOException { final Path hfilePath = item.hfilePath; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java index c50e3d5..941cda4 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java +++ 
hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java @@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -84,7 +85,7 @@ public abstract class MultiTableInputFormatBase extends + " previous error. Please look at the previous logs lines from" + " the task's full log for more details."); } - HTable table = + HTableInterface table = new HTable(context.getConfiguration(), tSplit.getTableName()); TableRecordReader trr = this.tableRecordReader; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java index 37b4f5a..5b471d5 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java @@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Durability; @@ -128,7 +129,7 @@ public class MultiTableOutputFormat extends OutputFormat { /** The table to write to. */ - private HTable table; + private HTableInterface table; /** * Instantiate a TableRecordWriter with the HBase HClient for writing. * * @param table The table to write to. 
*/ - public TableRecordWriter(HTable table) { + public TableRecordWriter(HTableInterface table) { this.table = table; } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java index 7db2c56..7674f59 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java @@ -23,6 +23,7 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -57,7 +58,7 @@ extends RecordReader { * * @param htable The {@link HTable} to scan. 
*/ - public void setHTable(HTable htable) { + public void setHTable(HTableInterface htable) { this.recordReaderImpl.setHTable(htable); } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java index e8e6e8b..32d296d 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java @@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -58,7 +59,7 @@ public class TableRecordReaderImpl { private ResultScanner scanner = null; private Scan scan = null; private Scan currentScan = null; - private HTable htable = null; + private HTableInterface htable = null; private byte[] lastSuccessfulRow = null; private ImmutableBytesWritable key = null; private Result value = null; @@ -121,7 +122,7 @@ public class TableRecordReaderImpl { * * @param htable The {@link HTable} to scan. 
*/ - public void setHTable(HTable htable) { + public void setHTable(HTableInterface htable) { Configuration conf = htable.getConfiguration(); logScannerActivity = conf.getBoolean( ScannerCallable.LOG_SCANNER_ACTIVITY, false); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java index d4ac8f7..80dfef2 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.client.HConnectable; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -116,7 +117,7 @@ public class VerifyReplication extends Configured implements Tool { Configuration peerConf = HBaseConfiguration.create(conf); ZKUtil.applyClusterKeyToConf(peerConf, zkClusterKey); - HTable replicatedTable = new HTable(peerConf, conf.get(NAME + ".tableName")); + HTableInterface replicatedTable = new HTable(peerConf, conf.get(NAME + ".tableName")); scan.setStartRow(value.getRow()); replicatedScanner = replicatedTable.getScanner(scan); return null; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java index 99d794d..cc8dd58 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java @@ -42,6 +42,7 @@ import 
org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -125,14 +126,14 @@ public class TableNamespaceManager { } public synchronized void update(NamespaceDescriptor ns) throws IOException { - HTable table = getNamespaceTable(); + HTableInterface table = getNamespaceTable(); if (get(table, ns.getName()) == null) { throw new NamespaceNotFoundException(ns.getName()); } upsert(table, ns); } - private NamespaceDescriptor get(HTable table, String name) throws IOException { + private NamespaceDescriptor get(HTableInterface table, String name) throws IOException { Result res = table.get(new Get(Bytes.toBytes(name))); if (res.isEmpty()) { return null; @@ -144,7 +145,7 @@ public class TableNamespaceManager { HBaseProtos.NamespaceDescriptor.parseFrom(val)); } - private void create(HTable table, NamespaceDescriptor ns) throws IOException { + private void create(HTableInterface table, NamespaceDescriptor ns) throws IOException { if (get(table, ns.getName()) != null) { throw new NamespaceExistException(ns.getName()); } @@ -154,7 +155,7 @@ public class TableNamespaceManager { upsert(table, ns); } - private void upsert(HTable table, NamespaceDescriptor ns) throws IOException { + private void upsert(HTableInterface table, NamespaceDescriptor ns) throws IOException { Put p = new Put(Bytes.toBytes(ns.getName())); p.addImmutable(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES, HTableDescriptor.NAMESPACE_COL_DESC_BYTES, diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java index ae59f26..830a22e 100644 --- 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.RackManager; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -120,7 +121,7 @@ public class FavoredNodeAssignmentHelper { } } // Write the region assignments to the meta table. - HTable metaTable = null; + HTableInterface metaTable = null; try { metaTable = new HTable(conf, TableName.META_TABLE_NAME); metaTable.put(puts); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java index 48fb26f..4a9a738 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -94,7 +95,7 @@ public class ModifyTableHandler extends TableEventHandler { Set tableRows = new HashSet(); Scan scan = MetaTableAccessor.getScanForTableName(table); scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); - HTable htable = null; + HTableInterface htable = null; try { 
htable = new HTable(masterServices.getConfiguration(), TableName.META_TABLE_NAME); ResultScanner resScanner = htable.getScanner(scan); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index c00be7b..48f24af 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -172,7 +173,7 @@ public class AccessControlLists { Bytes.toString(key)+": "+Bytes.toStringBinary(value) ); } - HTable acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); acls.put(p); @@ -203,7 +204,7 @@ public class AccessControlLists { LOG.debug("Removing permission "+ userPerm.toString()); } d.deleteColumns(ACL_LIST_FAMILY, key); - HTable acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); acls.delete(d); @@ -223,7 +224,7 @@ public class AccessControlLists { LOG.debug("Removing permissions of removed table "+ tableName); } - HTable acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); acls.delete(d); @@ -243,7 +244,7 @@ public class AccessControlLists { LOG.debug("Removing permissions of removed namespace "+ namespace); } - HTable acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); acls.delete(d); @@ -263,7 +264,7 @@ public class AccessControlLists { " from table "+ tableName); } - HTable 
acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); @@ -424,7 +425,7 @@ public class AccessControlLists { Scan scan = new Scan(); scan.addFamily(ACL_LIST_FAMILY); - HTable acls = null; + HTableInterface acls = null; ResultScanner scanner = null; try { acls = new HTable(conf, ACL_TABLE_NAME); @@ -467,7 +468,7 @@ public class AccessControlLists { // for normal user tables, we just read the table row from _acl_ ListMultimap perms = ArrayListMultimap.create(); - HTable acls = null; + HTableInterface acls = null; try { acls = new HTable(conf, ACL_TABLE_NAME); Get get = new Get(entryName); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index cdc824d..a3ecfb3 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; @@ -53,7 +54,7 @@ public class TokenUtil { */ public static Token obtainToken( Configuration conf) throws IOException { - HTable meta = null; + HTableInterface meta = null; try { meta = new HTable(conf, TableName.META_TABLE_NAME); CoprocessorRpcChannel rpcChannel = meta.coprocessorService(HConstants.EMPTY_START_ROW); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index 489aaf1..348be67 100644 --- 
hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.util.Tool; @@ -476,7 +477,7 @@ public final class Canary implements Tool { */ private static void sniff(final Admin admin, final Sink sink, HTableDescriptor tableDesc) throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(admin.getConfiguration(), tableDesc.getName()); @@ -506,7 +507,7 @@ public final class Canary implements Tool { final Admin admin, final Sink sink, HRegionInfo region, - HTable table) throws Exception { + HTableInterface table) throws Exception { HTableDescriptor tableDesc = table.getTableDescriptor(); byte[] startKey = null; Get get = null; @@ -607,7 +608,7 @@ public final class Canary implements Tool { String serverName = null; String tableName = null; HRegionInfo region = null; - HTable table = null; + HTableInterface table = null; Get get = null; byte[] startKey = null; Scan scan = null; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index e5365da..c47dc51 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -80,6 +80,7 @@ import org.apache.hadoop.hbase.client.HConnectable; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; 
import org.apache.hadoop.hbase.client.MetaScanner; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; @@ -185,7 +186,7 @@ public class HBaseFsck extends Configured { private ClusterStatus status; private HConnection connection; private HBaseAdmin admin; - private HTable meta; + private HTableInterface meta; // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions protected ExecutorService executor; private long startMillis = System.currentTimeMillis(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java index 84ffec8..c309f86 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -178,7 +179,7 @@ public class HBaseFsckRepair { */ public static void fixMetaHoleOnlineAndAddReplicas(Configuration conf, HRegionInfo hri, Collection servers, int numReplicas) throws IOException { - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); Put put = MetaTableAccessor.makePutFromRegionInfo(hri); if (numReplicas > 1) { Random r = new Random(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java index f3ac5cc..8b0431d 100644 --- 
hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.client.HConnectable; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -231,7 +232,7 @@ class HMerge { /** Instantiated to compact a normal user table */ private static class OnlineMerger extends Merger { private final TableName tableName; - private final HTable table; + private final HTableInterface table; private final ResultScanner metaScanner; private HRegionInfo latestRegion; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index e08530d..645e026 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -467,12 +468,12 @@ public abstract class HBaseTestCase extends TestCase { * A class that makes a {@link Incommon} out of a {@link HTable} */ public static class HTableIncommon implements Incommon { - final HTable table; + final HTableInterface table; /** * @param table */ - public HTableIncommon(final HTable table) { + public HTableIncommon(final HTableInterface table) { 
super(); this.table = table; } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index c67f3f8..1b21d92 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -899,7 +900,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { this.hbaseCluster = new MiniHBaseCluster(c, numMasters, numSlaves, masterClass, regionserverClass); // Don't leave here till we've done a successful scan of the hbase:meta - HTable t = new HTable(c, TableName.META_TABLE_NAME); + HTableInterface t = new HTable(c, TableName.META_TABLE_NAME); ResultScanner s = t.getScanner(new Scan()); while (s.next() != null) { continue; @@ -921,7 +922,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { public void restartHBaseCluster(int servers) throws IOException, InterruptedException { this.hbaseCluster = new MiniHBaseCluster(this.conf, servers); // Don't leave here till we've done a successful scan of the hbase:meta - HTable t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); + HTableInterface t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); ResultScanner s = t.getScanner(new Scan()); while (s.next() != null) { // do nothing @@ -1687,7 +1688,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @return Count of rows loaded. 
* @throws IOException */ - public int loadTable(final HTable t, final byte[] f) throws IOException { + public int loadTable(final HTableInterface t, final byte[] f) throws IOException { return loadTable(t, new byte[][] {f}); } @@ -1698,7 +1699,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @return Count of rows loaded. * @throws IOException */ - public int loadTable(final HTable t, final byte[] f, boolean writeToWAL) throws IOException { + public int loadTable(final HTableInterface t, final byte[] f, boolean writeToWAL) throws IOException { return loadTable(t, new byte[][] {f}, null, writeToWAL); } @@ -1709,7 +1710,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @return Count of rows loaded. * @throws IOException */ - public int loadTable(final HTable t, final byte[][] f) throws IOException { + public int loadTable(final HTableInterface t, final byte[][] f) throws IOException { return loadTable(t, f, null); } @@ -1721,7 +1722,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @return Count of rows loaded. * @throws IOException */ - public int loadTable(final HTable t, final byte[][] f, byte[] value) throws IOException { + public int loadTable(final HTableInterface t, final byte[][] f, byte[] value) throws IOException { return loadTable(t, f, value, true); } @@ -1733,7 +1734,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @return Count of rows loaded. 
* @throws IOException */ - public int loadTable(final HTable t, final byte[][] f, byte[] value, boolean writeToWAL) throws IOException { + public int loadTable(final HTableInterface t, final byte[][] f, byte[] value, boolean writeToWAL) + throws IOException { t.setAutoFlush(false); int rowCount = 0; for (byte[] row : HBaseTestingUtility.ROWS) { @@ -1750,7 +1752,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } /** A tracker for tracking and validating table rows - * generated with {@link HBaseTestingUtility#loadTable(HTable, byte[])} + * generated with {@link HBaseTestingUtility#loadTable(HTableInterface, byte[])} */ public static class SeenRowTracker { int dim = 'z' - 'a' + 1; @@ -1848,7 +1850,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { return rowCount; } - public void loadNumericRows(final HTable t, final byte[] f, int startRow, int endRow) throws IOException { + public void loadNumericRows(final HTableInterface t, final byte[] f, int startRow, int endRow) + throws IOException { for (int i = startRow; i < endRow; i++) { byte[] data = Bytes.toBytes(String.valueOf(i)); Put put = new Put(data); @@ -1857,7 +1860,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } } - public void deleteNumericRows(final HTable t, final byte[] f, int startRow, int endRow) throws IOException { + public void deleteNumericRows(final HTableInterface t, final byte[] f, int startRow, int endRow) + throws IOException { for (int i = startRow; i < endRow; i++) { byte[] data = Bytes.toBytes(String.valueOf(i)); Delete delete = new Delete(data); @@ -1869,7 +1873,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Return the number of rows in the given table. 
*/ - public int countRows(final HTable table) throws IOException { + public int countRows(final HTableInterface table) throws IOException { Scan scan = new Scan(); ResultScanner results = table.getScanner(scan); int count = 0; @@ -1880,7 +1884,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { return count; } - public int countRows(final HTable table, final byte[]... families) throws IOException { + public int countRows(final HTableInterface table, final byte[]... families) throws IOException { Scan scan = new Scan(); for (byte[] family: families) { scan.addFamily(family); @@ -1897,7 +1901,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Return an md5 digest of the entire contents of a table. */ - public String checksumRows(final HTable table) throws Exception { + public String checksumRows(final HTableInterface table) throws Exception { Scan scan = new Scan(); ResultScanner results = table.getScanner(scan); MessageDigest digest = MessageDigest.getInstance("MD5"); @@ -2003,7 +2007,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { final byte[] columnFamily, byte [][] startKeys) throws IOException { Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR); - HTable meta = new HTable(c, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(c, TableName.META_TABLE_NAME); HTableDescriptor htd = table.getTableDescriptor(); if(!htd.hasFamily(columnFamily)) { HColumnDescriptor hcd = new HColumnDescriptor(columnFamily); @@ -2068,7 +2072,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { public List createMultiRegionsInMeta(final Configuration conf, final HTableDescriptor htd, byte [][] startKeys) throws IOException { - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR); List newRegions = new ArrayList(startKeys.length); // add custom ones @@ -2091,7 
+2095,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { */ public List getMetaTableRows() throws IOException { // TODO: Redo using MetaTableAccessor class - HTable t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); + HTableInterface t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); List rows = new ArrayList(); ResultScanner s = t.getScanner(new Scan()); for (Result result : s) { @@ -2111,7 +2115,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { */ public List getMetaTableRows(TableName tableName) throws IOException { // TODO: Redo using MetaTableAccessor. - HTable t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); + HTableInterface t = new HTable(new Configuration(this.conf), TableName.META_TABLE_NAME); List rows = new ArrayList(); ResultScanner s = t.getScanner(new Scan()); for (Result result : s) { @@ -2894,7 +2898,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { */ public void waitUntilAllRegionsAssigned(final TableName tableName, final long timeout) throws IOException { - final HTable meta = new HTable(getConfiguration(), TableName.META_TABLE_NAME); + final HTableInterface meta = new HTable(getConfiguration(), TableName.META_TABLE_NAME); try { waitFor(timeout, 200, true, new Predicate() { @Override diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java index 484a00c..4147614 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import 
org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -146,7 +147,7 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { Stopwatch scanTimer = new Stopwatch(); tableOpenTimer.start(); - HTable table = new HTable(getConf(), TableName.valueOf(tablename)); + HTableInterface table = new HTable(getConf(), TableName.valueOf(tablename)); tableOpenTimer.stop(); Scan scan = getScan(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java index d73dd38..1b4609f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -97,7 +98,7 @@ public class TestAcidGuarantees implements Tool { byte data[] = new byte[10]; byte targetRows[][]; byte targetFamilies[][]; - HTable table; + HTableInterface table; AtomicLong numWritten = new AtomicLong(); public AtomicityWriter(TestContext ctx, byte targetRows[][], @@ -131,7 +132,7 @@ public class TestAcidGuarantees implements Tool { public static class AtomicGetReader extends RepeatingTestThread { byte targetRow[]; byte targetFamilies[][]; - HTable table; + HTableInterface table; int numVerified = 0; AtomicLong numRead = new AtomicLong(); @@ -189,7 +190,7 @@ public class TestAcidGuarantees implements Tool { */ public static class AtomicScanReader extends RepeatingTestThread { byte targetFamilies[][]; - HTable table; + 
HTableInterface table; AtomicLong numScans = new AtomicLong(); AtomicLong numRowsScanned = new AtomicLong(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java index 8e448e8..9926604 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java @@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; @@ -87,8 +88,8 @@ public class TestHBaseTestingUtility { final byte[] QUAL_NAME = Bytes.toBytes("qual"); final byte[] VALUE = Bytes.toBytes("value"); - HTable table1 = htu1.createTable(TABLE_NAME, FAM_NAME); - HTable table2 = htu2.createTable(TABLE_NAME, FAM_NAME); + HTableInterface table1 = htu1.createTable(TABLE_NAME, FAM_NAME); + HTableInterface table2 = htu2.createTable(TABLE_NAME, FAM_NAME); Put put = new Put(ROW); put.add(FAM_NAME, QUAL_NAME, VALUE); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java index 7dec203..9f0b8b3 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; @@ -260,7 +261,7 @@ public class TestIOFencing { admin = new HBaseAdmin(c); LOG.info("Creating table"); TEST_UTIL.createTable(TABLE_NAME, FAMILY); - HTable table = new HTable(c, TABLE_NAME); + HTableInterface table = new HTable(c, TABLE_NAME); LOG.info("Loading test table"); // Find the region List testRegions = TEST_UTIL.getMiniHBaseCluster().findRegionsForTable(TABLE_NAME); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaMigrationConvertingToPB.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaMigrationConvertingToPB.java index 4239dfb..991d1b5 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaMigrationConvertingToPB.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaMigrationConvertingToPB.java @@ -37,6 +37,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.migration.NamespaceUpgrade; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; @@ -126,7 +127,7 @@ public class TestMetaMigrationConvertingToPB { // Assert we are running against the copied-up filesystem. The copied-up // rootdir should have had a table named 'TestTable' in it. Assert it // present. 
- HTable t = new HTable(TEST_UTIL.getConfiguration(), TESTTABLE); + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TESTTABLE); ResultScanner scanner = t.getScanner(new Scan()); int count = 0; while (scanner.next() != null) { @@ -334,7 +335,7 @@ public class TestMetaMigrationConvertingToPB { final TableName tableName, byte [][] startKeys) throws IOException { Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR); - HTable meta = new HTable(c, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(c, TableName.META_TABLE_NAME); List newRegions = new ArrayList(startKeys.length); @@ -408,7 +409,7 @@ public class TestMetaMigrationConvertingToPB { final TableName tableName, byte [][] startKeys) throws IOException { Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR); - HTable meta = new HTable(c, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(c, TableName.META_TABLE_NAME); List newRegions = new ArrayList(startKeys.length); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java index 1448bf2..fb56c2c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; @@ -332,7 +333,7 @@ public class TestMetaTableAccessor { long seqNum100 = random.nextLong(); - HTable meta = MetaTableAccessor.getMetaHTable(hConnection); + HTableInterface meta = MetaTableAccessor.getMetaHTable(hConnection); try { 
MetaTableAccessor.updateRegionLocation(hConnection, primary, serverName0, seqNum0); @@ -359,7 +360,7 @@ public class TestMetaTableAccessor { } } - public static void assertMetaLocation(HTable meta, byte[] row, ServerName serverName, + public static void assertMetaLocation(HTableInterface meta, byte[] row, ServerName serverName, long seqNum, int replicaId, boolean checkSeqNum) throws IOException { Get get = new Get(row); Result result = meta.get(get); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java index c04edc1..76d7627 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.HBaseTestCase.Incommon; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -97,7 +98,7 @@ public class TestMultiVersions { hcd.setMaxVersions(3); desc.addFamily(hcd); this.admin.createTable(desc); - HTable table = new HTable(UTIL.getConfiguration(), desc.getTableName()); + HTableInterface table = new HTable(UTIL.getConfiguration(), desc.getTableName()); // TODO: Remove these deprecated classes or pull them in here if this is // only test using them. 
Incommon incommon = new HTableIncommon(table); @@ -140,7 +141,7 @@ public class TestMultiVersions { this.admin.createTable(desc); Put put = new Put(row, timestamp1); put.add(contents, contents, value1); - HTable table = new HTable(UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(UTIL.getConfiguration(), tableName); table.put(put); // Shut down and restart the HBase cluster table.close(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java index 66d23d9..f005b96 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.util.Bytes; @@ -250,7 +251,7 @@ public class TestNamespace { } //sanity check try to write and read from table - HTable table = new HTable(TEST_UTIL.getConfiguration(), desc.getTableName()); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), desc.getTableName()); Put p = new Put(Bytes.toBytes("row1")); p.add(Bytes.toBytes("my_cf"),Bytes.toBytes("my_col"),Bytes.toBytes("value1")); table.put(p); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java index e65430b..8462494 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import 
org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -257,7 +258,7 @@ public class TestZooKeeper { admin.close(); } - HTable table = + HTableInterface table = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName); Put put = new Put(Bytes.toBytes("testrow")); put.add(Bytes.toBytes("fam"), @@ -270,11 +271,11 @@ public class TestZooKeeper { @Test public void testMultipleZK() throws IOException, NoSuchMethodException, InvocationTargetException, IllegalAccessException { - HTable localMeta = + HTableInterface localMeta = new HTable(new Configuration(TEST_UTIL.getConfiguration()), TableName.META_TABLE_NAME); Configuration otherConf = new Configuration(TEST_UTIL.getConfiguration()); otherConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1"); - HTable ipMeta = new HTable(otherConf, TableName.META_TABLE_NAME); + HTableInterface ipMeta = new HTable(otherConf, TableName.META_TABLE_NAME); // dummy, just to open the connection final byte [] row = new byte [] {'r'}; @@ -531,7 +532,7 @@ public class TestZooKeeper { HMaster m = cluster.getMaster(); // now the cluster is up. So assign some regions. 
HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); - HTable table = null; + HTableInterface table = null; try { byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("1"), Bytes.toBytes("2"), Bytes.toBytes("3"), Bytes.toBytes("4"), Bytes.toBytes("5") }; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java index c24b4e1..26b074b 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java @@ -246,7 +246,7 @@ public class TestAdmin { final byte [] qualifier = Bytes.toBytes("qualifier"); final byte [] value = Bytes.toBytes("value"); final TableName table = TableName.valueOf("testDisableAndEnableTable"); - HTable ht = TEST_UTIL.createTable(table, HConstants.CATALOG_FAMILY); + HTableInterface ht = TEST_UTIL.createTable(table, HConstants.CATALOG_FAMILY); Put put = new Put(row); put.add(HConstants.CATALOG_FAMILY, qualifier, value); ht.put(put); @@ -291,8 +291,8 @@ public class TestAdmin { final byte [] value = Bytes.toBytes("value"); final byte [] table1 = Bytes.toBytes("testDisableAndEnableTable1"); final byte [] table2 = Bytes.toBytes("testDisableAndEnableTable2"); - HTable ht1 = TEST_UTIL.createTable(table1, HConstants.CATALOG_FAMILY); - HTable ht2 = TEST_UTIL.createTable(table2, HConstants.CATALOG_FAMILY); + HTableInterface ht1 = TEST_UTIL.createTable(table1, HConstants.CATALOG_FAMILY); + HTableInterface ht2 = TEST_UTIL.createTable(table2, HConstants.CATALOG_FAMILY); Put put = new Put(row); put.add(HConstants.CATALOG_FAMILY, qualifier, value); ht1.put(put); @@ -365,7 +365,7 @@ public class TestAdmin { splitKeys[1] = Bytes.toBytes(8); // Create & Fill the table - HTable table = TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY, splitKeys); + HTableInterface table = TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY, splitKeys); try { 
TEST_UTIL.loadNumericRows(table, HConstants.CATALOG_FAMILY, 0, 10); assertEquals(10, TEST_UTIL.countRows(table)); @@ -400,7 +400,7 @@ public class TestAdmin { htd.addFamily(fam2); htd.addFamily(fam3); this.admin.createTable(htd); - HTable table = new HTable(TEST_UTIL.getConfiguration(), "myTestTable"); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), "myTestTable"); HTableDescriptor confirmedHtd = table.getTableDescriptor(); assertEquals(htd.compareTo(confirmedHtd), 0); table.close(); @@ -1254,7 +1254,7 @@ public class TestAdmin { @Test (timeout=300000) public void testReadOnlyTable() throws Exception { byte [] name = Bytes.toBytes("testReadOnlyTable"); - HTable table = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); + HTableInterface table = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); byte[] value = Bytes.toBytes("somedata"); // This used to use an empty row... That must have been a bug Put put = new Put(value); @@ -1323,7 +1323,7 @@ public class TestAdmin { @Test (expected=TableNotDisabledException.class, timeout=300000) public void testTableNotDisabledExceptionWithATable() throws IOException { final TableName name = TableName.valueOf("testTableNotDisabledExceptionWithATable"); - HTable t = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); + HTableInterface t = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); try { this.admin.enableTable(name); }finally { @@ -1337,7 +1337,7 @@ public class TestAdmin { */ @Test (expected=TableNotFoundException.class, timeout=300000) public void testTableNotFoundExceptionWithoutAnyTables() throws IOException { - HTable ht = + HTableInterface ht = new HTable(TEST_UTIL.getConfiguration(),"testTableNotFoundExceptionWithoutAnyTables"); ht.get(new Get("e".getBytes())); } @@ -1658,7 +1658,7 @@ public class TestAdmin { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); admin.createTable(desc); - 
HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); for (int i = 1; i <= 256; i++) { // 256 writes should cause 8 log rolls diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java index bcae508..73a8400 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java @@ -83,7 +83,7 @@ public class TestClientOperationInterrupt { } util.createTable(tableName, new byte[][]{dummy, test}); - HTable ht = new HTable(conf, tableName); + HTableInterface ht = new HTable(conf, tableName); Put p = new Put(row1); p.add(dummy, dummy, dummy); ht.put(p); @@ -105,7 +105,7 @@ public class TestClientOperationInterrupt { @Override public void run() { try { - HTable ht = new HTable(conf, tableName); + HTableInterface ht = new HTable(conf, tableName); Result r = ht.get(new Get(row1)); noEx.incrementAndGet(); } catch (IOException e) { @@ -154,7 +154,7 @@ public class TestClientOperationInterrupt { Thread.sleep(1); } - HTable ht = new HTable(conf, tableName); + HTableInterface ht = new HTable(conf, tableName); Result r = ht.get(new Get(row1)); Assert.assertFalse(r.isEmpty()); } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java index 95b9b21..0149cd1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java @@ -84,7 +84,7 @@ public class 
TestClientScannerRPCTimeout { @Test public void testScannerNextRPCTimesout() throws Exception { final byte[] TABLE_NAME = Bytes.toBytes("testScannerNextRPCTimesout"); - HTable ht = TEST_UTIL.createTable(TABLE_NAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE_NAME, FAMILY); byte[] r1 = Bytes.toBytes("row-1"); byte[] r2 = Bytes.toBytes("row-2"); byte[] r3 = Bytes.toBytes("row-3"); @@ -123,7 +123,7 @@ public class TestClientScannerRPCTimeout { RSRpcServicesWithScanTimeout.tryNumber <= CLIENT_RETRIES_NUMBER); } - private void putToTable(HTable ht, byte[] rowkey) throws IOException { + private void putToTable(HTableInterface ht, byte[] rowkey) throws IOException { Put put = new Put(rowkey); put.add(FAMILY, QUALIFIER, VALUE); ht.put(put); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java index 1dfcfbd..5014af5 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java @@ -105,7 +105,7 @@ public class TestCloneSnapshotFromClient { // take an empty snapshot admin.snapshot(emptySnapshot, tableName); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); try { // enable table and insert data admin.enableTable(tableName); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 975140c..abdcd75 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -184,7 +184,7 @@ public class TestFromClientSide { desc.addFamily(hcd); 
TEST_UTIL.getHBaseAdmin().createTable(desc); Configuration c = TEST_UTIL.getConfiguration(); - HTable h = new HTable(c, TABLENAME); + HTableInterface h = new HTable(c, TABLENAME); long ts = System.currentTimeMillis(); Put p = new Put(T1, ts); @@ -244,7 +244,7 @@ public class TestFromClientSide { final byte[] COLUMN = Bytes.toBytes("column"); final byte[] VALUE = Bytes.toBytes("value"); - HTable table = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface table = TEST_UTIL.createTable(TABLENAME, FAMILY); // future timestamp long ts = System.currentTimeMillis() * 2; @@ -415,7 +415,7 @@ public class TestFromClientSide { putRows(ht, 3, value2, keyPrefix1); putRows(ht, 3, value2, keyPrefix2); putRows(ht, 3, value2, keyPrefix3); - HTable table = new HTable(TEST_UTIL.getConfiguration(), TABLE); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), TABLE); System.out.println("Checking values for key: " + keyPrefix1); assertEquals("Got back incorrect number of rows from scan", 3, getNumberOfRows(keyPrefix1, value2, table)); @@ -442,7 +442,7 @@ public class TestFromClientSide { getNumberOfRows(keyPrefix2, value2, table)); } - private void deleteColumns(HTable ht, String value, String keyPrefix) + private void deleteColumns(HTableInterface ht, String value, String keyPrefix) throws IOException { ResultScanner scanner = buildScanner(keyPrefix, value, ht); Iterator it = scanner.iterator(); @@ -457,7 +457,7 @@ public class TestFromClientSide { assertEquals("Did not perform correct number of deletes", 3, count); } - private int getNumberOfRows(String keyPrefix, String value, HTable ht) + private int getNumberOfRows(String keyPrefix, String value, HTableInterface ht) throws Exception { ResultScanner resultScanner = buildScanner(keyPrefix, value, ht); Iterator scanner = resultScanner.iterator(); @@ -474,7 +474,7 @@ public class TestFromClientSide { return numberOfResults; } - private ResultScanner buildScanner(String keyPrefix, String value, HTable ht) + private 
ResultScanner buildScanner(String keyPrefix, String value, HTableInterface ht) throws IOException { // OurFilterList allFilters = new OurFilterList(); FilterList allFilters = new FilterList(/* FilterList.Operator.MUST_PASS_ALL */); @@ -500,7 +500,7 @@ public class TestFromClientSide { return ht.getScanner(scan); } - private void putRows(HTable ht, int numRows, String value, String key) + private void putRows(HTableInterface ht, int numRows, String value, String key) throws IOException { for (int i = 0; i < numRows; i++) { String row = key + "_" + UUID.randomUUID().toString(); @@ -614,7 +614,7 @@ public class TestFromClientSide { * @return Count of rows in table. * @throws IOException */ - private int countRows(final HTable t, final Scan s) + private int countRows(final HTableInterface t, final Scan s) throws IOException { // Assert all rows in table. ResultScanner scanner = t.getScanner(s); @@ -627,7 +627,7 @@ public class TestFromClientSide { return count; } - private void assertRowCount(final HTable t, final int expected) + private void assertRowCount(final HTableInterface t, final int expected) throws IOException { assertEquals(expected, countRows(t, new Scan())); } @@ -675,7 +675,7 @@ public class TestFromClientSide { @Test public void testSuperSimple() throws Exception { byte [] TABLE = Bytes.toBytes("testSuperSimple"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); Put put = new Put(ROW); put.add(FAMILY, QUALIFIER, VALUE); ht.put(put); @@ -692,7 +692,7 @@ public class TestFromClientSide { byte [] TABLE = Bytes.toBytes("testMaxKeyValueSize"); Configuration conf = TEST_UTIL.getConfiguration(); String oldMaxSize = conf.get("hbase.client.keyvalue.maxsize"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte[] value = new byte[4 * 1024 * 1024]; Put put = new Put(ROW); put.add(FAMILY, QUALIFIER, value); @@ -712,7 +712,7 @@ public 
class TestFromClientSide { @Test public void testFilters() throws Exception { byte [] TABLE = Bytes.toBytes("testFilters"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte [][] ROWS = makeN(ROW, 10); byte [][] QUALIFIERS = { Bytes.toBytes("col0--"), Bytes.toBytes("col1--"), @@ -748,7 +748,7 @@ public class TestFromClientSide { @Test public void testKeyOnlyFilter() throws Exception { byte [] TABLE = Bytes.toBytes("testKeyOnlyFilter"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte [][] ROWS = makeN(ROW, 10); byte [][] QUALIFIERS = { Bytes.toBytes("col0--"), Bytes.toBytes("col1--"), @@ -785,7 +785,7 @@ public class TestFromClientSide { @Test public void testSimpleMissing() throws Exception { byte [] TABLE = Bytes.toBytes("testSimpleMissing"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte [][] ROWS = makeN(ROW, 4); // Try to get a row on an empty table @@ -901,7 +901,7 @@ public class TestFromClientSide { byte [][] QUALIFIERS = makeN(QUALIFIER, 10); byte [][] VALUES = makeN(VALUE, 10); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES); Get get; Scan scan; @@ -1210,7 +1210,7 @@ public class TestFromClientSide { fail("Creating a table with a null family passed, should fail"); } catch(Exception e) {} - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); // Null row (should NOT work) try { @@ -1309,7 +1309,7 @@ public class TestFromClientSide { long [] STAMPS = makeStamps(20); byte [][] VALUES = makeNAscii(VALUE, 20); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert 4 versions of same column Put put = new Put(ROW); @@ -1523,7 +1523,7 @@ public class 
TestFromClientSide { int [] LIMITS = {1,3,5}; long [] STAMPS = makeStamps(10); byte [][] VALUES = makeNAscii(VALUE, 10); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, LIMITS); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, LIMITS); // Insert limit + 1 on each family Put put = new Put(ROW); @@ -1719,7 +1719,7 @@ public class TestFromClientSide { byte [][] VALUES = makeN(VALUE, 5); long [] ts = {1000, 2000, 3000, 4000, 5000}; - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 5); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 5); Put put = new Put(ROW); for (int q = 0; q < 1; q++) @@ -1758,7 +1758,7 @@ public class TestFromClientSide { long [] ts = {1000, 2000, 3000, 4000, 5000}; HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 5); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 5); Put put = null; Result result = null; Get get = null; @@ -1873,7 +1873,7 @@ public class TestFromClientSide { byte [][] VALUES = makeN(VALUE, 5); long [] ts = {1000, 2000, 3000, 4000, 5000}; - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, 3); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, 3); Put put = new Put(ROW); put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]); @@ -2240,7 +2240,7 @@ public class TestFromClientSide { byte [][] ROWS = makeN(ROW, numRows); byte [][] QUALIFIERS = makeN(QUALIFIER, numColsPerRow); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); // Insert rows @@ -2323,7 +2323,7 @@ public class TestFromClientSide { byte [][] VALUES = makeNAscii(VALUE, 7); long [] STAMPS = makeStamps(7); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert three versions @@ -2387,7 +2387,7 @@ public class TestFromClientSide { byte [][] VALUES = makeNAscii(VALUE, 7); long [] STAMPS = makeStamps(7); - HTable ht = 
TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert lots versions @@ -2434,7 +2434,7 @@ public class TestFromClientSide { byte [] TABLE = Bytes.toBytes("testJiraTest1014"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); long manualStamp = 12345; @@ -2461,7 +2461,7 @@ public class TestFromClientSide { byte [][] VALUES = makeNAscii(VALUE, 7); long [] STAMPS = makeStamps(7); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert lots versions @@ -2504,7 +2504,7 @@ public class TestFromClientSide { byte [][] VALUES = makeNAscii(VALUE, 7); long [] STAMPS = makeStamps(7); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert lots versions @@ -2533,7 +2533,7 @@ public class TestFromClientSide { // Bulk Testers // - private void getVersionRangeAndVerifyGreaterThan(HTable ht, byte [] row, + private void getVersionRangeAndVerifyGreaterThan(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { @@ -2545,7 +2545,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start+1, end); } - private void getVersionRangeAndVerify(HTable ht, byte [] row, byte [] family, + private void getVersionRangeAndVerify(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { Get get = new Get(row); @@ -2556,7 +2556,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start, end); } - private void getAllVersionsAndVerify(HTable ht, byte [] row, byte [] family, + private void getAllVersionsAndVerify(HTableInterface ht, byte [] row, byte [] family, 
byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { Get get = new Get(row); @@ -2566,7 +2566,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start, end); } - private void scanVersionRangeAndVerifyGreaterThan(HTable ht, byte [] row, + private void scanVersionRangeAndVerifyGreaterThan(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { @@ -2578,7 +2578,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start+1, end); } - private void scanVersionRangeAndVerify(HTable ht, byte [] row, byte [] family, + private void scanVersionRangeAndVerify(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { Scan scan = new Scan(row); @@ -2589,7 +2589,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start, end); } - private void scanAllVersionsAndVerify(HTable ht, byte [] row, byte [] family, + private void scanAllVersionsAndVerify(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long [] stamps, byte [][] values, int start, int end) throws IOException { Scan scan = new Scan(row); @@ -2599,7 +2599,7 @@ public class TestFromClientSide { assertNResult(result, row, family, qualifier, stamps, values, start, end); } - private void getVersionAndVerify(HTable ht, byte [] row, byte [] family, + private void getVersionAndVerify(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long stamp, byte [] value) throws Exception { Get get = new Get(row); @@ -2610,7 +2610,7 @@ public class TestFromClientSide { assertSingleResult(result, row, family, qualifier, stamp, value); } - private void getVersionAndVerifyMissing(HTable ht, byte [] row, byte [] family, + private void 
getVersionAndVerifyMissing(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long stamp) throws Exception { Get get = new Get(row); @@ -2621,7 +2621,7 @@ public class TestFromClientSide { assertEmptyResult(result); } - private void scanVersionAndVerify(HTable ht, byte [] row, byte [] family, + private void scanVersionAndVerify(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long stamp, byte [] value) throws Exception { Scan scan = new Scan(row); @@ -2632,7 +2632,7 @@ public class TestFromClientSide { assertSingleResult(result, row, family, qualifier, stamp, value); } - private void scanVersionAndVerifyMissing(HTable ht, byte [] row, + private void scanVersionAndVerifyMissing(HTableInterface ht, byte [] row, byte [] family, byte [] qualifier, long stamp) throws Exception { Scan scan = new Scan(row); @@ -2643,7 +2643,7 @@ public class TestFromClientSide { assertNullResult(result); } - private void getTestNull(HTable ht, byte [] row, byte [] family, + private void getTestNull(HTableInterface ht, byte [] row, byte [] family, byte [] value) throws Exception { @@ -2668,12 +2668,12 @@ public class TestFromClientSide { } - private void scanTestNull(HTable ht, byte[] row, byte[] family, byte[] value) + private void scanTestNull(HTableInterface ht, byte[] row, byte[] family, byte[] value) throws Exception { scanTestNull(ht, row, family, value, false); } - private void scanTestNull(HTable ht, byte[] row, byte[] family, byte[] value, + private void scanTestNull(HTableInterface ht, byte[] row, byte[] family, byte[] value, boolean isReversedScan) throws Exception { Scan scan = new Scan(); @@ -2701,7 +2701,7 @@ public class TestFromClientSide { } - private void singleRowGetTest(HTable ht, byte [][] ROWS, byte [][] FAMILIES, + private void singleRowGetTest(HTableInterface ht, byte [][] ROWS, byte [][] FAMILIES, byte [][] QUALIFIERS, byte [][] VALUES) throws Exception { @@ -2799,7 +2799,7 @@ public class TestFromClientSide { } - private void 
singleRowScanTest(HTable ht, byte [][] ROWS, byte [][] FAMILIES, + private void singleRowScanTest(HTableInterface ht, byte [][] ROWS, byte [][] FAMILIES, byte [][] QUALIFIERS, byte [][] VALUES) throws Exception { @@ -2901,7 +2901,7 @@ public class TestFromClientSide { * Expects family and qualifier arrays to be valid for at least * the range: idx-2 < idx < idx+2 */ - private void getVerifySingleColumn(HTable ht, + private void getVerifySingleColumn(HTableInterface ht, byte [][] ROWS, int ROWIDX, byte [][] FAMILIES, int FAMILYIDX, byte [][] QUALIFIERS, int QUALIFIERIDX, @@ -2959,7 +2959,7 @@ public class TestFromClientSide { * the range: idx-2 to idx+2 * Expects row array to be valid for at least idx to idx+2 */ - private void scanVerifySingleColumn(HTable ht, + private void scanVerifySingleColumn(HTableInterface ht, byte [][] ROWS, int ROWIDX, byte [][] FAMILIES, int FAMILYIDX, byte [][] QUALIFIERS, int QUALIFIERIDX, @@ -3019,7 +3019,7 @@ public class TestFromClientSide { * Verify we do not read any values by accident around a single column * Same requirements as getVerifySingleColumn */ - private void getVerifySingleEmpty(HTable ht, + private void getVerifySingleEmpty(HTableInterface ht, byte [][] ROWS, int ROWIDX, byte [][] FAMILIES, int FAMILYIDX, byte [][] QUALIFIERS, int QUALIFIERIDX) @@ -3050,7 +3050,7 @@ public class TestFromClientSide { } - private void scanVerifySingleEmpty(HTable ht, + private void scanVerifySingleEmpty(HTableInterface ht, byte [][] ROWS, int ROWIDX, byte [][] FAMILIES, int FAMILYIDX, byte [][] QUALIFIERS, int QUALIFIERIDX) @@ -3275,7 +3275,7 @@ public class TestFromClientSide { // Helpers // - private Result getSingleScanResult(HTable ht, Scan scan) throws IOException { + private Result getSingleScanResult(HTableInterface ht, Scan scan) throws IOException { ResultScanner scanner = ht.getScanner(scan); Result result = scanner.next(); scanner.close(); @@ -3335,7 +3335,7 @@ public class TestFromClientSide { long [] STAMPS = makeStamps(20); 
byte [][] VALUES = makeNAscii(VALUE, 20); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Insert 4 versions of same column Put put = new Put(ROW); @@ -3546,7 +3546,7 @@ public class TestFromClientSide { public void testUpdates() throws Exception { byte [] TABLE = Bytes.toBytes("testUpdates"); - HTable hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); // Write a column with values at timestamp 1, 2 and 3 byte[] row = Bytes.toBytes("row1"); @@ -3597,7 +3597,7 @@ public class TestFromClientSide { String tableName = "testUpdatesWithMajorCompaction"; byte [] TABLE = Bytes.toBytes(tableName); - HTable hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); // Write a column with values at timestamp 1, 2 and 3 @@ -3659,7 +3659,7 @@ public class TestFromClientSide { String tableName = "testMajorCompactionBetweenTwoUpdates"; byte [] TABLE = Bytes.toBytes(tableName); - HTable hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); + HTableInterface hTable = TEST_UTIL.createTable(TABLE, FAMILY, 10); HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); // Write a column with values at timestamp 1, 2 and 3 @@ -3724,7 +3724,7 @@ public class TestFromClientSide { @Test public void testGet_EmptyTable() throws IOException { - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testGet_EmptyTable"), FAMILY); + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testGet_EmptyTable"), FAMILY); Get get = new Get(ROW); get.addFamily(FAMILY); Result r = table.get(get); @@ -3756,7 +3756,7 @@ public class TestFromClientSide { @Test public void testGet_NonExistentRow() throws IOException { - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testGet_NonExistentRow"), FAMILY); + HTableInterface table = 
TEST_UTIL.createTable(Bytes.toBytes("testGet_NonExistentRow"), FAMILY); Put put = new Put(ROW); put.add(FAMILY, QUALIFIER, VALUE); table.put(put); @@ -3783,7 +3783,7 @@ public class TestFromClientSide { final byte [] row1 = Bytes.toBytes("row1"); final byte [] row2 = Bytes.toBytes("row2"); final byte [] value = Bytes.toBytes("abcd"); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testPut"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testPut"), new byte [][] {CONTENTS_FAMILY, SMALL_FAMILY}); Put put = new Put(row1); put.add(CONTENTS_FAMILY, null, value); @@ -3820,7 +3820,8 @@ public class TestFromClientSide { public void testPutNoCF() throws IOException { final byte[] BAD_FAM = Bytes.toBytes("BAD_CF"); final byte[] VAL = Bytes.toBytes(100); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testPutNoCF"), new byte[][]{FAMILY}); + HTableInterface table = + TEST_UTIL.createTable(Bytes.toBytes("testPutNoCF"), new byte[][] { FAMILY }); boolean caughtNSCFE = false; @@ -3841,7 +3842,7 @@ public class TestFromClientSide { final byte[] SMALL_FAMILY = Bytes.toBytes("smallfam"); final int NB_BATCH_ROWS = 10; final byte[] value = Bytes.toBytes("abcd"); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPut"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPut"), new byte[][] {CONTENTS_FAMILY, SMALL_FAMILY }); ArrayList rowsUpdate = new ArrayList(); for (int i = 0; i < NB_BATCH_ROWS; i++) { @@ -3868,7 +3869,7 @@ public class TestFromClientSide { final byte [] SMALL_FAMILY = Bytes.toBytes("smallfam"); final byte [] value = Bytes.toBytes("abcd"); final int NB_BATCH_ROWS = 10; - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPutBufferedOneFlush"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPutBufferedOneFlush"), new byte [][] {CONTENTS_FAMILY, SMALL_FAMILY}); table.setAutoFlush(false, true); ArrayList rowsUpdate = new ArrayList(); @@ -3909,7 +3910,7 @@ public class 
TestFromClientSide { final byte[] SMALL_FAMILY = Bytes.toBytes("smallfam"); final byte[] value = Bytes.toBytes("abcd"); final int NB_BATCH_ROWS = 10; - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPutBufferedManyManyFlushes"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testRowsPutBufferedManyManyFlushes"), new byte[][] {CONTENTS_FAMILY, SMALL_FAMILY }); table.setAutoFlush(false, true); table.setWriteBufferSize(10); @@ -4156,14 +4157,14 @@ public class TestFromClientSide { final byte[] attrValue = Bytes.toBytes("somevalue"); byte[] value = Bytes.toBytes("value"); - HTable a = TEST_UTIL.createTable(tableAname, HConstants.CATALOG_FAMILY); - HTable b = TEST_UTIL.createTable(tableBname, HConstants.CATALOG_FAMILY); + HTableInterface a = TEST_UTIL.createTable(tableAname, HConstants.CATALOG_FAMILY); + HTableInterface b = TEST_UTIL.createTable(tableBname, HConstants.CATALOG_FAMILY); Put put = new Put(ROW); put.add(HConstants.CATALOG_FAMILY, null, value); a.put(put); // open a new connection to A and a connection to b - HTable newA = new HTable(TEST_UTIL.getConfiguration(), tableAname); + HTableInterface newA = new HTable(TEST_UTIL.getConfiguration(), tableAname); // copy data from A to B Scan scan = new Scan(); @@ -4183,7 +4184,7 @@ public class TestFromClientSide { } // Opening a new connection to A will cause the tables to be reloaded - HTable anotherA = new HTable(TEST_UTIL.getConfiguration(), tableAname); + HTableInterface anotherA = new HTable(TEST_UTIL.getConfiguration(), tableAname); Get get = new Get(ROW); get.addFamily(HConstants.CATALOG_FAMILY); anotherA.get(get); @@ -4345,7 +4346,7 @@ public class TestFromClientSide { final byte [] TABLENAME = Bytes.toBytes("testMultiRowMutation"); final byte [] ROW1 = Bytes.toBytes("testRow1"); - HTable t = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface t = TEST_UTIL.createTable(TABLENAME, FAMILY); Put p = new Put(ROW); p.add(FAMILY, QUALIFIER, VALUE); MutationProto m1 = 
ProtobufUtil.toMutation(MutationType.PUT, p); @@ -4374,7 +4375,7 @@ public class TestFromClientSide { public void testRowMutation() throws Exception { LOG.info("Starting testRowMutation"); final byte [] TABLENAME = Bytes.toBytes("testRowMutation"); - HTable t = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface t = TEST_UTIL.createTable(TABLENAME, FAMILY); byte [][] QUALIFIERS = new byte [][] { Bytes.toBytes("a"), Bytes.toBytes("b") }; @@ -4406,7 +4407,7 @@ public class TestFromClientSide { public void testAppend() throws Exception { LOG.info("Starting testAppend"); final byte [] TABLENAME = Bytes.toBytes("testAppend"); - HTable t = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface t = TEST_UTIL.createTable(TABLENAME, FAMILY); byte[] v1 = Bytes.toBytes("42"); byte[] v2 = Bytes.toBytes("23"); byte [][] QUALIFIERS = new byte [][] { @@ -4436,7 +4437,7 @@ public class TestFromClientSide { LOG.info("Starting testIncrementWithDeletes"); final TableName TABLENAME = TableName.valueOf("testIncrementWithDeletes"); - HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILY); final byte[] COLUMN = Bytes.toBytes("column"); ht.incrementColumnValue(ROW, FAMILY, COLUMN, 5); @@ -4457,7 +4458,7 @@ public class TestFromClientSide { public void testIncrementingInvalidValue() throws Exception { LOG.info("Starting testIncrementingInvalidValue"); final byte [] TABLENAME = Bytes.toBytes("testIncrementingInvalidValue"); - HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILY); final byte[] COLUMN = Bytes.toBytes("column"); Put p = new Put(ROW); // write an integer here (not a Long) @@ -4483,7 +4484,7 @@ public class TestFromClientSide { public void testIncrementInvalidArguments() throws Exception { LOG.info("Starting testIncrementInvalidArguments"); final byte[] TABLENAME = Bytes.toBytes("testIncrementInvalidArguments"); - HTable ht = 
TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILY); final byte[] COLUMN = Bytes.toBytes("column"); try { // try null row @@ -4538,7 +4539,7 @@ public class TestFromClientSide { public void testIncrementOutOfOrder() throws Exception { LOG.info("Starting testIncrementOutOfOrder"); final byte [] TABLENAME = Bytes.toBytes("testIncrementOutOfOrder"); - HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILY); byte [][] QUALIFIERS = new byte [][] { Bytes.toBytes("B"), Bytes.toBytes("A"), Bytes.toBytes("C") @@ -4578,7 +4579,7 @@ public class TestFromClientSide { public void testIncrement() throws Exception { LOG.info("Starting testIncrement"); final byte [] TABLENAME = Bytes.toBytes("testIncrement"); - HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILY); byte [][] ROWS = new byte [][] { Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("c"), @@ -4656,7 +4657,7 @@ public class TestFromClientSide { conf.set(HConstants.HBASE_CLIENT_IPC_POOL_TYPE, "round-robin"); conf.setInt(HConstants.HBASE_CLIENT_IPC_POOL_SIZE, poolSize); - HTable table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY }, + HTableInterface table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY }, conf, Integer.MAX_VALUE); final long ts = EnvironmentEdgeManager.currentTimeMillis(); @@ -4693,7 +4694,7 @@ public class TestFromClientSide { conf.set(HConstants.HBASE_CLIENT_IPC_POOL_TYPE, "thread-local"); conf.setInt(HConstants.HBASE_CLIENT_IPC_POOL_SIZE, poolSize); - final HTable table = TEST_UTIL.createTable(tableName, + final HTableInterface table = TEST_UTIL.createTable(tableName, new byte[][] { FAMILY }, conf, 3); final long ts = EnvironmentEdgeManager.currentTimeMillis(); @@ -4771,7 +4772,7 @@ public class TestFromClientSide { final byte [] anotherrow = Bytes.toBytes("anotherrow"); final byte [] value2 = 
Bytes.toBytes("abcd"); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPut"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPut"), new byte [][] {FAMILY}); Put put1 = new Put(ROW); put1.add(FAMILY, QUALIFIER, VALUE); @@ -4813,7 +4814,7 @@ public class TestFromClientSide { final byte [] value3 = Bytes.toBytes("cccc"); final byte [] value4 = Bytes.toBytes("dddd"); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPutWithCompareOp"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndPutWithCompareOp"), new byte [][] {FAMILY}); Put put2 = new Put(ROW); @@ -4879,7 +4880,7 @@ public class TestFromClientSide { final byte [] value3 = Bytes.toBytes("cccc"); final byte [] value4 = Bytes.toBytes("dddd"); - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndDeleteWithCompareOp"), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testCheckAndDeleteWithCompareOp"), new byte [][] {FAMILY}); Put put2 = new Put(ROW); @@ -5272,7 +5273,7 @@ public class TestFromClientSide { @Test public void testJira6912() throws Exception { byte [] TABLE = Bytes.toBytes("testJira6912"); - HTable foo = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10); + HTableInterface foo = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10); List puts = new ArrayList(); for (int i=0;i !=100; i++){ @@ -5297,7 +5298,7 @@ public class TestFromClientSide { @Test public void testScan_NullQualifier() throws IOException { - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testScan_NullQualifier"), FAMILY); + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes("testScan_NullQualifier"), FAMILY); Put put = new Put(ROW); put.add(FAMILY, QUALIFIER, VALUE); table.put(put); @@ -5326,7 +5327,7 @@ public class TestFromClientSide { @Test public void testNegativeTimestamp() throws IOException { - HTable table = TEST_UTIL.createTable(Bytes.toBytes("testNegativeTimestamp"), FAMILY); + HTableInterface 
table = TEST_UTIL.createTable(Bytes.toBytes("testNegativeTimestamp"), FAMILY); try { Put put = new Put(ROW, -1); @@ -5471,7 +5472,7 @@ public class TestFromClientSide { @Test public void testRawScanRespectsVersions() throws Exception { byte[] TABLE = Bytes.toBytes("testRawScan"); - HTable table = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }); + HTableInterface table = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }); byte[] row = Bytes.toBytes("row"); // put the same row 4 times, with different values @@ -5547,7 +5548,7 @@ public class TestFromClientSide { public void testSmallScan() throws Exception { // Test Initialization. byte[] TABLE = Bytes.toBytes("testSmallScan"); - HTable table = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface table = TEST_UTIL.createTable(TABLE, FAMILY); // Insert one row each region int insertNum = 10; @@ -5583,7 +5584,7 @@ public class TestFromClientSide { @Test public void testSuperSimpleWithReverseScan() throws Exception { byte[] TABLE = Bytes.toBytes("testSuperSimpleWithReverseScan"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); Put put = new Put(Bytes.toBytes("0-b11111-0000000000000000000")); put.add(FAMILY, QUALIFIER, VALUE); ht.put(put); @@ -5629,7 +5630,7 @@ public class TestFromClientSide { @Test public void testFiltersWithReverseScan() throws Exception { byte[] TABLE = Bytes.toBytes("testFiltersWithReverseScan"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte[][] ROWS = makeN(ROW, 10); byte[][] QUALIFIERS = { Bytes.toBytes("col0--"), Bytes.toBytes("col1--"), @@ -5669,7 +5670,7 @@ public class TestFromClientSide { @Test public void testKeyOnlyFilterWithReverseScan() throws Exception { byte[] TABLE = Bytes.toBytes("testKeyOnlyFilterWithReverseScan"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte[][] 
ROWS = makeN(ROW, 10); byte[][] QUALIFIERS = { Bytes.toBytes("col0--"), Bytes.toBytes("col1--"), @@ -5710,7 +5711,7 @@ public class TestFromClientSide { @Test public void testSimpleMissingWithReverseScan() throws Exception { byte[] TABLE = Bytes.toBytes("testSimpleMissingWithReverseScan"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); byte[][] ROWS = makeN(ROW, 4); // Try to get a row on an empty table @@ -5775,7 +5776,7 @@ public class TestFromClientSide { @Test public void testNullWithReverseScan() throws Exception { byte[] TABLE = Bytes.toBytes("testNullWithReverseScan"); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); // Null qualifier (should work) Put put = new Put(ROW); put.add(FAMILY, null, VALUE); @@ -5816,7 +5817,7 @@ public class TestFromClientSide { byte[][] FAMILIES = makeNAscii(FAMILY, 3); byte[][] VALUES = makeN(VALUE, 5); long[] ts = { 1000, 2000, 3000, 4000, 5000 }; - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, TEST_UTIL.getConfiguration(), 3); Put put = new Put(ROW); @@ -6091,7 +6092,7 @@ public class TestFromClientSide { table.close(); } - private void reverseScanTest(HTable table, boolean small) throws IOException { + private void reverseScanTest(HTableInterface table, boolean small) throws IOException { // scan backward Scan scan = new Scan(); scan.setReversed(true); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java index 64ced17..0ff8dd1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java @@ -98,7 +98,7 @@ public class TestFromClientSide3 { // Nothing to do. 
} - private void randomCFPuts(HTable table, byte[] row, byte[] family, int nPuts) + private void randomCFPuts(HTableInterface table, byte[] row, byte[] family, int nPuts) throws Exception { Put put = new Put(row); for (int i = 0; i < nPuts; i++) { @@ -269,7 +269,7 @@ public class TestFromClientSide3 { @Test public void testHTableBatchWithEmptyPut() throws Exception { - HTable table = TEST_UTIL.createTable( + HTableInterface table = TEST_UTIL.createTable( Bytes.toBytes("testHTableBatchWithEmptyPut"), new byte[][] { FAMILY }); try { List actions = (List) new ArrayList(); @@ -296,7 +296,7 @@ public class TestFromClientSide3 { // Test with a single region table. - HTable table = TEST_UTIL.createTable( + HTableInterface table = TEST_UTIL.createTable( Bytes.toBytes("testHTableExistsMethodSingleRegionSingleGet"), new byte[][] { FAMILY }); Put put = new Put(ROW); @@ -315,7 +315,7 @@ public class TestFromClientSide3 { public void testHTableExistsMethodSingleRegionMultipleGets() throws Exception { - HTable table = TEST_UTIL.createTable( + HTableInterface table = TEST_UTIL.createTable( Bytes.toBytes("testHTableExistsMethodSingleRegionMultipleGets"), new byte[][] { FAMILY }); Put put = new Put(ROW); @@ -336,7 +336,7 @@ public class TestFromClientSide3 { @Test public void testHTableExistsMethodMultipleRegionsSingleGet() throws Exception { - HTable table = TEST_UTIL.createTable( + HTableInterface table = TEST_UTIL.createTable( Bytes.toBytes("testHTableExistsMethodMultipleRegionsSingleGet"), new byte[][] { FAMILY }, 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255); Put put = new Put(ROW); @@ -355,7 +355,7 @@ public class TestFromClientSide3 { @Test public void testHTableExistsMethodMultipleRegionsMultipleGets() throws Exception { - HTable table = TEST_UTIL.createTable( + HTableInterface table = TEST_UTIL.createTable( Bytes.toBytes("testHTableExistsMethodMultipleRegionsMultipleGets"), new byte[][] { FAMILY }, 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255); Put 
put = new Put(ROW); @@ -409,7 +409,7 @@ public class TestFromClientSide3 { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(Bytes.toBytes("test"))); desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable table = new HTable(TEST_UTIL.getConfiguration(), "test"); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), "test"); Put put = new Put(ROW_BYTES); put.add(FAMILY, COL_QUAL, VAL_BYTES); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java index 1a2017a..9535898 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java @@ -63,7 +63,7 @@ public class TestFromClientSideNoCodec { final byte [] t = Bytes.toBytes("testBasics"); final byte [][] fs = new byte[][] {Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") }; - HTable ht = TEST_UTIL.createTable(t, fs); + HTableInterface ht = TEST_UTIL.createTable(t, fs); // Check put and get. 
final byte [] row = Bytes.toBytes("row"); Put p = new Put(row); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java index 54232b6..e5a5abe 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java @@ -425,7 +425,7 @@ public class TestHCM { c2.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); // Don't retry: retry = test failed c2.setInt(RpcClient.IDLE_TIME, idleTime); - final HTable table = new HTable(c2, tableName.getBytes()); + final HTableInterface table = new HTable(c2, tableName.getBytes()); Put put = new Put(ROW); put.add(FAM_NAM, ROW, ROW); @@ -777,7 +777,7 @@ public class TestHCM { */ @Test public void testConnectionManagement() throws Exception{ - HTable table0 = TEST_UTIL.createTable(TABLE_NAME1, FAM_NAM); + HTableInterface table0 = TEST_UTIL.createTable(TABLE_NAME1, FAM_NAM); HConnection conn = HConnectionManager.createConnection(TEST_UTIL.getConfiguration()); HTableInterface table = conn.getTable(TABLE_NAME1.getName()); table.close(); @@ -1233,7 +1233,7 @@ public class TestHCM { try { c1 = ConnectionManager.getConnectionInternal(config); LOG.info("HTable connection " + i + " " + c1); - HTable table = new HTable(config, TABLE_NAME4, pool); + HTableInterface table = new HTable(config, TABLE_NAME4, pool); table.close(); LOG.info("HTable connection " + i + " closed " + c1); } catch (Exception e) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java index 9fc0565..199d76d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java @@ -147,7 +147,7 @@ public class TestMultiParallel { */ @Test(timeout=300000) public void 
testActiveThreadsCount() throws Exception{ - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List puts = constructPutRequests(); // creates a Put for every region table.batch(puts); Field poolField = table.getClass().getDeclaredField("pool"); @@ -160,7 +160,7 @@ public class TestMultiParallel { @Test(timeout=300000) public void testBatchWithGet() throws Exception { LOG.info("test=testBatchWithGet"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // load test data List puts = constructPutRequests(); @@ -199,7 +199,7 @@ public class TestMultiParallel { @Test public void testBadFam() throws Exception { LOG.info("test=testBadFam"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List actions = new ArrayList(); Put p = new Put(Bytes.toBytes("row1")); @@ -252,7 +252,7 @@ public class TestMultiParallel { private void doTestFlushCommits(boolean doAbort) throws Exception { // Load the data LOG.info("get new table"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); table.setAutoFlush(false, true); table.setWriteBufferSize(10 * 1024 * 1024); @@ -321,7 +321,7 @@ public class TestMultiParallel { @Test (timeout=300000) public void testBatchWithPut() throws Exception { LOG.info("test=testBatchWithPut"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // put multiple rows using a batch List puts = constructPutRequests(); @@ -354,7 +354,7 @@ public class TestMultiParallel { @Test(timeout=300000) public void testBatchWithDelete() throws Exception { LOG.info("test=testBatchWithDelete"); - HTable table = new 
HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // Load some data List puts = constructPutRequests(); @@ -383,7 +383,7 @@ public class TestMultiParallel { @Test(timeout=300000) public void testHTableDeleteWithList() throws Exception { LOG.info("test=testHTableDeleteWithList"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // Load some data List puts = constructPutRequests(); @@ -412,7 +412,7 @@ public class TestMultiParallel { @Test(timeout=300000) public void testBatchWithManyColsInOneRowGetAndPut() throws Exception { LOG.info("test=testBatchWithManyColsInOneRowGetAndPut"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List puts = new ArrayList(); for (int i = 0; i < 100; i++) { @@ -453,7 +453,7 @@ public class TestMultiParallel { final byte[] QUAL2 = Bytes.toBytes("qual2"); final byte[] QUAL3 = Bytes.toBytes("qual3"); final byte[] QUAL4 = Bytes.toBytes("qual4"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); Delete d = new Delete(ONE_ROW); table.delete(d); Put put = new Put(ONE_ROW); @@ -531,7 +531,7 @@ public class TestMultiParallel { Runnable r = new Runnable() { @Override public void run() { - HTable table = null; + HTableInterface table = null; try { table = new HTable(UTIL.getConfiguration(), TEST_TABLE); } catch (IOException e) { @@ -573,7 +573,7 @@ public class TestMultiParallel { @Test(timeout=300000) public void testBatchWithMixedActions() throws Exception { LOG.info("test=testBatchWithMixedActions"); - HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // Load some data to start Object[] results = 
table.batch(constructPutRequests()); @@ -666,7 +666,7 @@ public class TestMultiParallel { return puts; } - private void validateLoadedData(HTable table) throws IOException { + private void validateLoadedData(HTableInterface table) throws IOException { // get the data back and validate that it is correct for (byte[] k : KEYS) { Get get = new Get(k); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java index 69cad8a..c8f029d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java @@ -87,7 +87,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); Integer[] putRows = new Integer[] {1, 3, 5, 7}; Integer[] putColumns = new Integer[] { 1, 3, 5}; @@ -129,7 +129,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); Integer[] putRows = new Integer[] {1, 3, 5, 7}; Integer[] putColumns = new Integer[] { 1, 3, 5}; @@ -170,7 +170,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... 
- HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); Integer[] putRows = new Integer[] {1, 3, 5, 7}; Integer[] putColumns = new Integer[] { 1, 3, 5}; @@ -223,7 +223,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); Integer[] putRows1 = new Integer[] {1, 2, 3}; Integer[] putColumns1 = new Integer[] { 2, 5, 6}; @@ -293,7 +293,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. putNVersions(ht, FAMILY, 0, 0, 1, 5); @@ -327,7 +327,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. putNVersions(ht, FAMILY, 0, 0, 1, 5); @@ -353,7 +353,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. putNVersions(ht, FAMILY, 0, 0, 1, 5); @@ -379,7 +379,7 @@ public class TestMultipleTimestamps { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... 
- HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. putNVersions(ht, FAMILY, 0, 0, 1, 5); @@ -428,7 +428,7 @@ public class TestMultipleTimestamps { * versions for the row/column specified by rowIdx & colIdx. * */ - private Cell[] getNVersions(HTable ht, byte[] cf, int rowIdx, + private Cell[] getNVersions(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, List versions) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -442,7 +442,7 @@ public class TestMultipleTimestamps { return result.rawCells(); } - private ResultScanner scan(HTable ht, byte[] cf, + private ResultScanner scan(HTableInterface ht, byte[] cf, Integer[] rowIndexes, Integer[] columnIndexes, Long[] versions, int maxVersions) throws IOException { @@ -463,7 +463,7 @@ public class TestMultipleTimestamps { return scanner; } - private void put(HTable ht, byte[] cf, Integer[] rowIndexes, + private void put(HTableInterface ht, byte[] cf, Integer[] rowIndexes, Integer[] columnIndexes, Long[] versions) throws IOException { for (int rowIdx: rowIndexes) { @@ -485,7 +485,7 @@ public class TestMultipleTimestamps { * Insert in specific row/column versions with timestamps * versionStart..versionEnd. */ - private void putNVersions(HTable ht, byte[] cf, int rowIdx, int colIdx, + private void putNVersions(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, long versionStart, long versionEnd) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -504,7 +504,7 @@ public class TestMultipleTimestamps { * For row/column specified by rowIdx/colIdx, delete the cell * corresponding to the specified version. 
*/ - private void deleteOneVersion(HTable ht, byte[] cf, int rowIdx, + private void deleteOneVersion(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, long version) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -518,7 +518,7 @@ public class TestMultipleTimestamps { * For row/column specified by rowIdx/colIdx, delete all cells * preceeding the specified version. */ - private void deleteAllVersionsBefore(HTable ht, byte[] cf, int rowIdx, + private void deleteAllVersionsBefore(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, long version) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -528,7 +528,8 @@ public class TestMultipleTimestamps { ht.delete(del); } - private void deleteColumn(HTable ht, byte[] cf, int rowIdx, int colIdx) throws IOException { + private void deleteColumn(HTableInterface ht, byte[] cf, int rowIdx, int colIdx) + throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); byte column[] = Bytes.toBytes("column:" + colIdx); Delete del = new Delete(row); @@ -536,7 +537,7 @@ public class TestMultipleTimestamps { ht.delete(del); } - private void deleteFamily(HTable ht, byte[] cf, int rowIdx) throws IOException { + private void deleteFamily(HTableInterface ht, byte[] cf, int rowIdx) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); Delete del = new Delete(row); del.deleteFamily(cf); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index d8baea4..de81f45 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -139,7 +139,7 @@ public class TestReplicaWithCluster { HTableDescriptor hdt = HTU.createTableDescriptor("testCreateDeleteTable"); hdt.setRegionReplication(NB_SERVERS); 
hdt.addCoprocessor(SlowMeCopro.class.getName()); - HTable table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); + HTableInterface table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); Put p = new Put(row); p.add(f, row, row); @@ -171,7 +171,7 @@ public class TestReplicaWithCluster { HTableDescriptor hdt = HTU.createTableDescriptor("testChangeTable"); hdt.setRegionReplication(NB_SERVERS); hdt.addCoprocessor(SlowMeCopro.class.getName()); - HTable table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); + HTableInterface table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); // basic test: it should work. Put p = new Put(row); @@ -253,7 +253,7 @@ public class TestReplicaWithCluster { Put p = new Put(row); p.add(row, row, row); - final HTable table = new HTable(HTU.getConfiguration(), hdt.getTableName()); + final HTableInterface table = new HTable(HTU.getConfiguration(), hdt.getTableName()); table.put(p); HTU.getHBaseAdmin().flush(table.getTableName()); @@ -277,7 +277,7 @@ public class TestReplicaWithCluster { table.close(); LOG.info("stale get on the first cluster done. Now for the second."); - final HTable table2 = new HTable(HTU.getConfiguration(), hdt.getTableName()); + final HTableInterface table2 = new HTable(HTU.getConfiguration(), hdt.getTableName()); Waiter.waitFor(HTU.getConfiguration(), 1000, new Waiter.Predicate() { @Override public boolean evaluate() throws Exception { @@ -312,7 +312,7 @@ public class TestReplicaWithCluster { HTableDescriptor hdt = HTU.createTableDescriptor("testBulkLoad"); hdt.setRegionReplication(NB_SERVERS); hdt.addCoprocessor(SlowMeCopro.class.getName()); - HTable table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); + HTableInterface table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration()); // create hfiles to load. 
LOG.debug("Creating test data"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java index f67f491..195007c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java @@ -110,7 +110,7 @@ public class TestRestoreSnapshotFromClient { // take an empty snapshot admin.snapshot(emptySnapshot, tableName); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); // enable table and insert data admin.enableTable(tableName); SnapshotTestingUtils.loadData(TEST_UTIL, table, 500, FAMILY); @@ -174,7 +174,7 @@ public class TestRestoreSnapshotFromClient { public void testRestoreSchemaChange() throws Exception { byte[] TEST_FAMILY2 = Bytes.toBytes("cf2"); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); // Add one column family and put some data in it admin.disableTable(tableName); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java index 02c2ef8..22eaf82 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java @@ -130,7 +130,7 @@ public class TestRpcControllerFactory { // change one of the connection properties so we get a new HConnection with our configuration conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT + 1); - HTable table = new HTable(conf, name); + HTableInterface table = new HTable(conf, name); 
table.setAutoFlushTo(false); byte[] row = Bytes.toBytes("row"); Put p = new Put(row); @@ -188,7 +188,7 @@ public class TestRpcControllerFactory { table.close(); } - int doScan(HTable table, Scan scan, int expectedCount) throws IOException { + int doScan(HTableInterface table, Scan scan, int expectedCount) throws IOException { ResultScanner results = table.getScanner(scan); results.next(); results.close(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java index 65fb0d6..97162ad 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java @@ -65,7 +65,7 @@ public class TestScannerTimeout { c.setInt(HConstants.THREAD_WAKE_FREQUENCY, THREAD_WAKE_FREQUENCY); // We need more than one region server for this test TEST_UTIL.startMiniCluster(2); - HTable table = TEST_UTIL.createTable(TABLE_NAME, SOME_BYTES); + HTableInterface table = TEST_UTIL.createTable(TABLE_NAME, SOME_BYTES); for (int i = 0; i < NB_ROWS; i++) { Put put = new Put(Bytes.toBytes(i)); put.add(SOME_BYTES, SOME_BYTES, SOME_BYTES); @@ -99,7 +99,7 @@ public class TestScannerTimeout { LOG.info("START ************ test2481"); Scan scan = new Scan(); scan.setCaching(1); - HTable table = + HTableInterface table = new HTable(new Configuration(TEST_UTIL.getConfiguration()), TABLE_NAME); ResultScanner r = table.getScanner(scan); int count = 0; @@ -139,7 +139,7 @@ public class TestScannerTimeout { // this new table Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT * 100); - HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME); + HTableInterface higherScanTimeoutTable = new HTable(conf, TABLE_NAME); ResultScanner r = higherScanTimeoutTable.getScanner(scan); // This takes way less 
than SCANNER_TIMEOUT*100 rs.abort("die!"); @@ -173,7 +173,7 @@ public class TestScannerTimeout { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt( HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT*100); - HTable table = new HTable(conf, TABLE_NAME); + HTableInterface table = new HTable(conf, TABLE_NAME); LOG.info("START ************ TEST3686A---22"); ResultScanner r = table.getScanner(scan); @@ -212,7 +212,7 @@ public class TestScannerTimeout { // this new table Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT * 100); - HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME); + HTableInterface higherScanTimeoutTable = new HTable(conf, TABLE_NAME); ResultScanner r = higherScanTimeoutTable.getScanner(scan); int count = 1; r.next(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java index 380b337..0f0d650 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java @@ -106,7 +106,7 @@ public class TestScannersFromClientSide { byte [] TABLE = Bytes.toBytes("testScanBatch"); byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 8); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILY); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILY); Put put; Scan scan; @@ -182,7 +182,7 @@ public class TestScannersFromClientSide { byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3); byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES); Get get; Put put; @@ -303,7 +303,7 @@ public class TestScannersFromClientSide { byte [][] FAMILIES = 
HTestConst.makeNAscii(FAMILY, 3); byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 10); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES); Put put; Scan scan; @@ -352,7 +352,7 @@ public class TestScannersFromClientSide { byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3); byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20); - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES); Get get; Put put; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java index b300dfa..78d61e8 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java @@ -194,7 +194,7 @@ public class TestSnapshotCloneIndependence { final TableName localTableName = TableName.valueOf(STRING_TABLE_NAME + startTime); - HTable original = UTIL.createTable(localTableName, TEST_FAM); + HTableInterface original = UTIL.createTable(localTableName, TEST_FAM); try { UTIL.loadTable(original, TEST_FAM); @@ -213,7 +213,7 @@ public class TestSnapshotCloneIndependence { TableName cloneTableName = TableName.valueOf("test-clone-" + localTableName); admin.cloneSnapshot(snapshotName, cloneTableName); - HTable clonedTable = new HTable(UTIL.getConfiguration(), cloneTableName); + HTableInterface clonedTable = new HTable(UTIL.getConfiguration(), cloneTableName); try { final int clonedTableRowCount = UTIL.countRows(clonedTable); @@ -327,7 +327,7 @@ public class TestSnapshotCloneIndependence { final long startTime = System.currentTimeMillis(); final TableName localTableName = TableName.valueOf(STRING_TABLE_NAME + startTime); - HTable original = UTIL.createTable(localTableName, TEST_FAM); + HTableInterface 
original = UTIL.createTable(localTableName, TEST_FAM); UTIL.loadTable(original, TEST_FAM); final String snapshotNameAsString = "snapshot_" + localTableName; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index b5c1359..c473975 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -148,7 +148,7 @@ public class TestSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); // put some stuff in the table - HTable table = new HTable(UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(UTIL.getConfiguration(), TABLE_NAME); UTIL.loadTable(table, TEST_FAM); table.close(); @@ -184,7 +184,7 @@ public class TestSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); // put some stuff in the table - HTable table = new HTable(UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(UTIL.getConfiguration(), TABLE_NAME); UTIL.loadTable(table, TEST_FAM, false); LOG.debug("FS state before disable:"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java index f441d59..6b1e798 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java @@ -169,7 +169,7 @@ public class TestSnapshotMetadata { assertTrue(htd.getConfiguration().size() > 0); admin.createTable(htd); - HTable original = new HTable(UTIL.getConfiguration(), originalTableName); + HTableInterface original = new HTable(UTIL.getConfiguration(), originalTableName); originalTableName = TableName.valueOf(sourceTableNameAsString); originalTableDescriptor = 
admin.getTableDescriptor(originalTableName); originalTableDescription = originalTableDescriptor.toStringCustomizedValues(); @@ -201,7 +201,7 @@ public class TestSnapshotMetadata { familiesList, snapshotNameAsString, rootDir, fs, /* onlineSnapshot= */ false); admin.cloneSnapshot(snapshotName, clonedTableName); - HTable clonedTable = new HTable(UTIL.getConfiguration(), clonedTableName); + HTableInterface clonedTable = new HTable(UTIL.getConfiguration(), clonedTableName); HTableDescriptor cloneHtd = admin.getTableDescriptor(clonedTableName); assertEquals( originalTableDescription.replace(originalTableName.getNameAsString(),clonedTableNameAsString), @@ -265,7 +265,7 @@ public class TestSnapshotMetadata { List familiesWithDataList = new ArrayList(); List emptyFamiliesList = new ArrayList(); if (addData) { - HTable original = new HTable(UTIL.getConfiguration(), originalTableName); + HTableInterface original = new HTable(UTIL.getConfiguration(), originalTableName); UTIL.loadTable(original, familyForUpdate); // family arbitrarily chosen original.close(); @@ -312,7 +312,7 @@ public class TestSnapshotMetadata { admin.enableTable(originalTableName); // verify that the descrption is reverted - HTable original = new HTable(UTIL.getConfiguration(), originalTableName); + HTableInterface original = new HTable(UTIL.getConfiguration(), originalTableName); try { assertTrue(originalTableDescriptor.equals(admin.getTableDescriptor(originalTableName))); assertTrue(originalTableDescriptor.equals(original.getTableDescriptor())); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java index 8d7c6d5..1be63be 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java @@ -91,7 +91,7 @@ public class TestTableSnapshotScanner { Admin 
admin = util.getHBaseAdmin(); // put some stuff in the table - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); util.loadTable(table, FAMILIES); Path rootDir = FSUtils.getRootDir(util.getConfiguration()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java index 05f9c53..1222bfc 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java @@ -96,7 +96,7 @@ public class TestTimestampsFilter { Cell kvs[]; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { @@ -171,7 +171,7 @@ public class TestTimestampsFilter { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); Put p = new Put(Bytes.toBytes("row")); p.add(FAMILY, Bytes.toBytes("column0"), 3, Bytes.toBytes("value0-3")); @@ -231,7 +231,7 @@ public class TestTimestampsFilter { byte [][] FAMILIES = new byte[][] { FAMILY }; // create table; set versions to max... - HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); + HTableInterface ht = TEST_UTIL.createTable(TABLE, FAMILIES, Integer.MAX_VALUE); // For row:0, col:0: insert versions 1 through 5. 
putNVersions(ht, FAMILY, 0, 0, 1, 5); @@ -254,7 +254,7 @@ public class TestTimestampsFilter { ht.close(); } - private void verifyInsertedValues(HTable ht, byte[] cf) throws IOException { + private void verifyInsertedValues(HTableInterface ht, byte[] cf) throws IOException { for (int rowIdx = 0; rowIdx < 5; rowIdx++) { for (int colIdx = 0; colIdx < 5; colIdx++) { // ask for versions that exist. @@ -313,7 +313,7 @@ public class TestTimestampsFilter { * versions for the row/column specified by rowIdx & colIdx. * */ - private Cell[] getNVersions(HTable ht, byte[] cf, int rowIdx, + private Cell[] getNVersions(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, List versions) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -332,7 +332,7 @@ public class TestTimestampsFilter { * Uses the TimestampFilter on a Scan to request a specified list of * versions for the rows from startRowIdx to endRowIdx (both inclusive). */ - private Result[] scanNVersions(HTable ht, byte[] cf, int startRowIdx, + private Result[] scanNVersions(HTableInterface ht, byte[] cf, int startRowIdx, int endRowIdx, List versions) throws IOException { byte startRow[] = Bytes.toBytes("row:" + startRowIdx); @@ -349,7 +349,7 @@ public class TestTimestampsFilter { * Insert in specific row/column versions with timestamps * versionStart..versionEnd. */ - private void putNVersions(HTable ht, byte[] cf, int rowIdx, int colIdx, + private void putNVersions(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, long versionStart, long versionEnd) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); @@ -368,7 +368,7 @@ public class TestTimestampsFilter { * For row/column specified by rowIdx/colIdx, delete the cell * corresponding to the specified version. 
*/ - private void deleteOneVersion(HTable ht, byte[] cf, int rowIdx, + private void deleteOneVersion(HTableInterface ht, byte[] cf, int rowIdx, int colIdx, long version) throws IOException { byte row[] = Bytes.toBytes("row:" + rowIdx); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java index 919fd24..2390ec1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -80,7 +81,7 @@ public class TestConstraint { Constraints.add(desc, CheckWasRunConstraint.class); util.getHBaseAdmin().createTable(desc); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); try { // test that we don't fail on a valid put Put put = new Put(row1); @@ -112,7 +113,7 @@ public class TestConstraint { Constraints.add(desc, AllFailConstraint.class); util.getHBaseAdmin().createTable(desc); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); // test that we do fail on violation Put put = new Put(row1); @@ -155,7 +156,7 @@ public class TestConstraint { Constraints.disableConstraint(desc, AllFailConstraint.class); util.getHBaseAdmin().createTable(desc); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new 
HTable(util.getConfiguration(), tableName); try { // test that we don't fail because its disabled Put put = new Put(row1); @@ -187,7 +188,7 @@ public class TestConstraint { Constraints.disable(desc); util.getHBaseAdmin().createTable(desc); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); try { // test that we do fail on violation Put put = new Put(row1); @@ -219,7 +220,7 @@ public class TestConstraint { CheckWasRunConstraint.wasRun = false; util.getHBaseAdmin().createTable(desc); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); // test that we do fail on violation Put put = new Put(row1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java index 655d663..5992f7e 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.Map; import java.util.TreeMap; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -90,7 +91,7 @@ public class TestBatchCoprocessorEndpoint { util.waitUntilAllRegionsAssigned(TEST_TABLE); admin.close(); - HTable table = new HTable(conf, TEST_TABLE); + HTableInterface table = new HTable(conf, TEST_TABLE); for (int i = 0; i < ROWSIZE; i++) { Put put = new Put(ROWS[i]); put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i)); @@ -106,7 +107,7 @@ public class TestBatchCoprocessorEndpoint { @Test public void testAggregationNullResponse() throws Throwable { - HTable table = new 
HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); ColumnAggregationWithNullResponseProtos.SumRequest.Builder builder = ColumnAggregationWithNullResponseProtos.SumRequest .newBuilder(); @@ -143,7 +144,7 @@ public class TestBatchCoprocessorEndpoint { return ret; } - private Map sum(final HTable table, final byte[] family, + private Map sum(final HTableInterface table, final byte[] family, final byte[] qualifier, final byte[] start, final byte[] end) throws ServiceException, Throwable { ColumnAggregationProtos.SumRequest.Builder builder = ColumnAggregationProtos.SumRequest @@ -159,7 +160,7 @@ public class TestBatchCoprocessorEndpoint { @Test public void testAggregationWithReturnValue() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[0], ROWS[ROWS.length - 1]); int sumResult = 0; @@ -195,7 +196,7 @@ public class TestBatchCoprocessorEndpoint { @Test public void testAggregation() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[0], ROWS[ROWS.length - 1]); int sumResult = 0; @@ -228,7 +229,7 @@ public class TestBatchCoprocessorEndpoint { @Test public void testAggregationWithErrors() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); final Map results = Collections.synchronizedMap( new TreeMap( diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java index e67bb9a..0eaadeb 100644 --- 
hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java @@ -31,6 +31,7 @@ import java.util.NavigableMap; import java.util.TreeMap; import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -99,7 +100,7 @@ public class TestCoprocessorEndpoint { util.waitUntilAllRegionsAssigned(TEST_TABLE); admin.close(); - HTable table = new HTable(conf, TEST_TABLE); + HTableInterface table = new HTable(conf, TEST_TABLE); for (int i = 0; i < ROWSIZE; i++) { Put put = new Put(ROWS[i]); put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i)); @@ -113,7 +114,7 @@ public class TestCoprocessorEndpoint { util.shutdownMiniCluster(); } - private Map sum(final HTable table, final byte [] family, + private Map sum(final HTableInterface table, final byte [] family, final byte [] qualifier, final byte [] start, final byte [] end) throws ServiceException, Throwable { return table.coprocessorService(ColumnAggregationProtos.ColumnAggregationService.class, @@ -138,7 +139,7 @@ public class TestCoprocessorEndpoint { @Test public void testAggregation() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[0], ROWS[ROWS.length-1]); int sumResult = 0; @@ -297,7 +298,7 @@ public class TestCoprocessorEndpoint { Configuration configuration = new Configuration(util.getConfiguration()); // Make it not retry forever configuration.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); - HTable table = new HTable(configuration, TEST_TABLE); + HTableInterface table = new HTable(configuration, TEST_TABLE); try { CoprocessorRpcChannel protocol = 
table.coprocessorService(ROWS[0]); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java index b29bec4..fae4766 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java @@ -86,7 +86,7 @@ public class TestHTableWrapper { } private HTableInterface hTableInterface; - private HTable table; + private HTableInterface table; @BeforeClass public static void setupBeforeClass() throws Exception { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java index 13cb906..4cbf237 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java @@ -162,14 +162,14 @@ public class TestOpenTableInCoprocessor { admin.createTable(primary); admin.createTable(other); - HTable table = new HTable(UTIL.getConfiguration(), "primary"); + HTableInterface table = new HTable(UTIL.getConfiguration(), "primary"); Put p = new Put(new byte[] { 'a' }); p.add(family, null, new byte[] { 'a' }); table.put(p); table.flushCommits(); table.close(); - HTable target = new HTable(UTIL.getConfiguration(), otherTable); + HTableInterface target = new HTable(UTIL.getConfiguration(), otherTable); assertTrue("Didn't complete update to target table!", completeCheck[0]); assertEquals("Didn't find inserted row", 1, getKeyValueCount(target)); target.close(); @@ -181,7 +181,7 @@ public class TestOpenTableInCoprocessor { * @return number of keyvalues over all rows in the table * @throws IOException */ - private int getKeyValueCount(HTable table) throws IOException { + private int 
getKeyValueCount(HTableInterface table) throws IOException { Scan scan = new Scan(); scan.setMaxVersions(Integer.MAX_VALUE - 1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java index 6997ef5..49eaf9f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Durability; @@ -93,7 +94,7 @@ public class TestRegionObserverBypass { */ @Test public void testSimple() throws Exception { - HTable t = new HTable(util.getConfiguration(), tableName); + HTableInterface t = new HTable(util.getConfiguration(), tableName); Put p = new Put(row1); p.add(test,dummy,dummy); // before HBASE-4331, this would throw an exception @@ -112,7 +113,7 @@ public class TestRegionObserverBypass { //previous deletes will eclipse successive puts having the same timestamp EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); - HTable t = new HTable(util.getConfiguration(), tableName); + HTableInterface t = new HTable(util.getConfiguration(), tableName); List puts = new ArrayList(); Put p = new Put(row1); p.add(dummy,dummy,dummy); @@ -197,7 +198,7 @@ public class TestRegionObserverBypass { EnvironmentEdgeManager.reset(); } - private void checkRowAndDelete(HTable t, byte[] row, int count) throws IOException { + private void checkRowAndDelete(HTableInterface t, byte[] row, int count) throws IOException { Get g = 
new Get(row); Result r = t.get(g); assertEquals(count, r.size()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java index 27b807f..a07ed32 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -115,7 +116,7 @@ public class TestRegionObserverInterface { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testRegionObserver"); // recreate table every time in order to reset the status of the // coprocessor. 
- HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { verifyMethodResult(SimpleRegionObserver.class, new String[] { "hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut", "hadDelete", "hadPostStartRegionOperation", @@ -177,7 +178,7 @@ public class TestRegionObserverInterface { @Test public void testRowMutation() throws IOException { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testRowMutation"); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { verifyMethodResult(SimpleRegionObserver.class, new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut", @@ -214,7 +215,7 @@ public class TestRegionObserverInterface { @Test public void testIncrementHook() throws IOException { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testIncrementHook"); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { Increment inc = new Increment(Bytes.toBytes(0)); inc.addColumn(A, A, 1); @@ -242,7 +243,7 @@ public class TestRegionObserverInterface { public void testCheckAndPutHooks() throws IOException { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndPutHooks"); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { Put p = new Put(Bytes.toBytes(0)); p.add(A, A, A); @@ -273,7 +274,7 @@ public class TestRegionObserverInterface { public void testCheckAndDeleteHooks() throws IOException { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndDeleteHooks"); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = 
util.createTable(tableName, new byte[][] {A, B, C}); try { Put p = new Put(Bytes.toBytes(0)); p.add(A, A, A); @@ -303,7 +304,7 @@ public class TestRegionObserverInterface { @Test public void testAppendHook() throws IOException { TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testAppendHook"); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { Append app = new Append(Bytes.toBytes(0)); app.add(A, A, A); @@ -342,7 +343,7 @@ public class TestRegionObserverInterface { new Boolean[] {false, false, false, false} ); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); Put put = new Put(ROW); put.add(A, A, A); table.put(put); @@ -392,7 +393,7 @@ public class TestRegionObserverInterface { new Boolean[] {false, false} ); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); Put put = new Put(ROW); put.add(A, A, A); table.put(put); @@ -499,7 +500,7 @@ public class TestRegionObserverInterface { htd.addCoprocessor(EvenOnlyCompactor.class.getName()); admin.createTable(htd); - HTable table = new HTable(util.getConfiguration(), compactTable); + HTableInterface table = new HTable(util.getConfiguration(), compactTable); for (long i=1; i<=10; i++) { byte[] iBytes = Bytes.toBytes(i); Put put = new Put(iBytes); @@ -561,7 +562,7 @@ public class TestRegionObserverInterface { String testName = TestRegionObserverInterface.class.getName()+".bulkLoadHFileTest"; TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".bulkLoadHFileTest"); Configuration conf = util.getConfiguration(); - HTable table = util.createTable(tableName, new byte[][] {A, B, C}); + HTableInterface table = util.createTable(tableName, new byte[][] {A, B, C}); try { verifyMethodResult(SimpleRegionObserver.class, 
new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"}, diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java index 5c05169..95a4efc 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -258,7 +259,7 @@ public class TestRegionObserverScannerOpenHook { Admin admin = UTIL.getHBaseAdmin(); admin.createTable(desc); - HTable table = new HTable(conf, desc.getTableName()); + HTableInterface table = new HTable(conf, desc.getTableName()); // put a row and flush it to disk Put put = new Put(ROW); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java index 014b689..a00ff61 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java @@ -24,6 +24,7 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import 
org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.regionserver.HRegionServer; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index 085348e..b7e8b64 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -34,6 +34,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -107,7 +108,7 @@ public class TestRowProcessorEndpoint { private static volatile int expectedCounter = 0; private static int rowSize, row2Size; - private volatile static HTable table = null; + private volatile static HTableInterface table = null; private volatile static boolean swapped = false; private volatile CountDownLatch startSignal; private volatile CountDownLatch doneSignal; @@ -196,7 +197,7 @@ public class TestRowProcessorEndpoint { } } - private int incrementCounter(HTable table) throws Throwable { + private int incrementCounter(HTableInterface table) throws Throwable { CoprocessorRpcChannel channel = table.coprocessorService(ROW); RowProcessorEndpoint.IncrementCounterProcessor processor = new RowProcessorEndpoint.IncrementCounterProcessor(ROW); @@ -259,7 +260,7 @@ public class TestRowProcessorEndpoint { } } - private void swapRows(HTable table) throws Throwable { + private void swapRows(HTableInterface table) throws Throwable { CoprocessorRpcChannel channel = table.coprocessorService(ROW); RowProcessorEndpoint.RowSwapProcessor processor = new 
RowProcessorEndpoint.RowSwapProcessor(ROW, ROW2); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index 20135dd..61898b3 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -31,6 +31,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -161,7 +162,7 @@ public class TestColumnRangeFilter { public void TestColumnRangeFilterClient() throws Exception { String family = "Family"; String table = "TestColumnRangeFilterClient"; - HTable ht = TEST_UTIL.createTable(Bytes.toBytes(table), + HTableInterface ht = TEST_UTIL.createTable(Bytes.toBytes(table), Bytes.toBytes(family), Integer.MAX_VALUE); List rows = generateRandomWords(10, 8); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java index a35d5c5..9564d0a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import 
org.apache.hadoop.hbase.client.ResultScanner; @@ -85,7 +86,7 @@ public class TestFilterWithScanLimits { // add filter after batch defined scan.setFilter(filter); - HTable table = new HTable(conf, name); + HTableInterface table = new HTable(conf, name); ResultScanner scanner = table.getScanner(scan); // Expect to get following row // row2 => , , @@ -111,7 +112,7 @@ public class TestFilterWithScanLimits { private static void prepareData() { try { - HTable table = new HTable(TestFilterWithScanLimits.conf, name); + HTableInterface table = new HTable(TestFilterWithScanLimits.conf, name); assertTrue("Fail to create the table", admin.tableExists(name)); List puts = new ArrayList(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java index 9587aa3..c4a8c8c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -83,7 +84,7 @@ public class TestFilterWrapper { FilterList filter = new FilterList(fs); scan.setFilter(filter); - HTable table = new HTable(conf, name); + HTableInterface table = new HTable(conf, name); ResultScanner scanner = table.getScanner(scan); // row2 (c1-c4) and row3(c1-c4) are returned @@ -110,7 +111,7 @@ public class TestFilterWrapper { private static void prepareData() { try { - HTable table = new HTable(TestFilterWrapper.conf, name); + HTableInterface table = new 
HTable(TestFilterWrapper.conf, name); assertTrue("Fail to create the table", admin.tableExists(name)); List puts = new ArrayList(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index 1f853ac..f77f9cc 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -88,7 +89,7 @@ public class TestFuzzyRowAndColumnRangeFilter { public void Test() throws Exception { String cf = "f"; String table = "TestFuzzyAndColumnRangeFilterClient"; - HTable ht = TEST_UTIL.createTable(Bytes.toBytes(table), + HTableInterface ht = TEST_UTIL.createTable(Bytes.toBytes(table), Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) @@ -128,7 +129,7 @@ public class TestFuzzyRowAndColumnRangeFilter { runTest(ht, 1, 8); } - private void runTest(HTable hTable, int cqStart, int expectedSize) throws IOException { + private void runTest(HTableInterface hTable, int cqStart, int expectedSize) throws IOException { // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1] byte[] fuzzyKey = new byte[10]; ByteBuffer buf = ByteBuffer.wrap(fuzzyKey); @@ -150,7 +151,8 @@ public class TestFuzzyRowAndColumnRangeFilter { runScanner(hTable, expectedSize, columnRangeFilter, fuzzyRowFilter); } - private void runScanner(HTable hTable, int expectedSize, Filter... 
filters) throws IOException { + private void runScanner(HTableInterface hTable, int expectedSize, Filter... filters) + throws IOException { String cf = "f"; Scan scan = new Scan(); scan.addFamily(cf.getBytes()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java index 49e3465..705399e 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.wal.HLogUtil; @@ -259,7 +260,7 @@ public class TestBlockReorder { // We use the regionserver file system & conf as we expect it to have the hook. conf = targetRs.getConfiguration(); HFileSystem rfs = (HFileSystem) targetRs.getFileSystem(); - HTable h = htu.createTable("table".getBytes(), sb); + HTableInterface h = htu.createTable("table".getBytes(), sb); // Now, we have 4 datanodes and a replication count of 3. So we don't know if the datanode // with the same node will be used. 
We can't really stop an existing datanode, this would diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java index 0ceb953..8aa0f58 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.regionserver.HRegionServer; @@ -137,7 +138,7 @@ public class TestChangingEncoding { static void writeTestDataBatch(Configuration conf, String tableName, int batchId) throws Exception { LOG.debug("Writing test data batch " + batchId); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); table.setAutoFlushTo(false); for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) { Put put = new Put(getRowKey(batchId, i)); @@ -155,7 +156,7 @@ public class TestChangingEncoding { static void verifyTestDataBatch(Configuration conf, String tableName, int batchId) throws Exception { LOG.debug("Verifying test data batch " + batchId); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) { Get get = new Get(getRowKey(batchId, i)); Result result = table.get(get); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java index 0ebef3b..5b5ccb5 100644 --- 
hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java index 88ffd9f..cbbbee1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java @@ -36,6 +36,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -124,7 +125,7 @@ public class TestTableInputFormat { * @param table * @throws IOException */ - static void runTestMapred(HTable table) throws IOException { + static void runTestMapred(HTableInterface table) throws IOException { org.apache.hadoop.hbase.mapred.TableRecordReader trr = new org.apache.hadoop.hbase.mapred.TableRecordReader(); trr.setStartRow("aaa".getBytes()); @@ -157,7 +158,7 @@ public class TestTableInputFormat { * @throws IOException * @throws InterruptedException */ - static void runTestMapreduce(HTable table) throws IOException, + static void runTestMapreduce(HTableInterface table) throws IOException, 
InterruptedException { org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl trr = new org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl(); @@ -268,7 +269,7 @@ public class TestTableInputFormat { */ @Test public void testTableRecordReader() throws IOException { - HTable table = createTable("table1".getBytes()); + HTableInterface table = createTable("table1".getBytes()); runTestMapred(table); } @@ -279,7 +280,7 @@ public class TestTableInputFormat { */ @Test public void testTableRecordReaderScannerFail() throws IOException { - HTable htable = createIOEScannerTable("table2".getBytes(), 1); + HTableInterface htable = createIOEScannerTable("table2".getBytes(), 1); runTestMapred(htable); } @@ -290,7 +291,7 @@ public class TestTableInputFormat { */ @Test(expected = IOException.class) public void testTableRecordReaderScannerFailTwice() throws IOException { - HTable htable = createIOEScannerTable("table3".getBytes(), 2); + HTableInterface htable = createIOEScannerTable("table3".getBytes(), 2); runTestMapred(htable); } @@ -302,7 +303,7 @@ public class TestTableInputFormat { */ @Test public void testTableRecordReaderScannerTimeout() throws IOException { - HTable htable = createDNRIOEScannerTable("table4".getBytes(), 1); + HTableInterface htable = createDNRIOEScannerTable("table4".getBytes(), 1); runTestMapred(htable); } @@ -314,7 +315,7 @@ public class TestTableInputFormat { */ @Test(expected = org.apache.hadoop.hbase.DoNotRetryIOException.class) public void testTableRecordReaderScannerTimeoutTwice() throws IOException { - HTable htable = createDNRIOEScannerTable("table5".getBytes(), 2); + HTableInterface htable = createDNRIOEScannerTable("table5".getBytes(), 2); runTestMapred(htable); } @@ -327,7 +328,7 @@ public class TestTableInputFormat { @Test public void testTableRecordReaderMapreduce() throws IOException, InterruptedException { - HTable table = createTable("table1-mr".getBytes()); + HTableInterface table = createTable("table1-mr".getBytes()); 
runTestMapreduce(table); } @@ -340,7 +341,7 @@ public class TestTableInputFormat { @Test public void testTableRecordReaderScannerFailMapreduce() throws IOException, InterruptedException { - HTable htable = createIOEScannerTable("table2-mr".getBytes(), 1); + HTableInterface htable = createIOEScannerTable("table2-mr".getBytes(), 1); runTestMapreduce(htable); } @@ -353,7 +354,7 @@ public class TestTableInputFormat { @Test(expected = IOException.class) public void testTableRecordReaderScannerFailMapreduceTwice() throws IOException, InterruptedException { - HTable htable = createIOEScannerTable("table3-mr".getBytes(), 2); + HTableInterface htable = createIOEScannerTable("table3-mr".getBytes(), 2); runTestMapreduce(htable); } @@ -367,7 +368,7 @@ public class TestTableInputFormat { @Test public void testTableRecordReaderScannerTimeoutMapreduce() throws IOException, InterruptedException { - HTable htable = createDNRIOEScannerTable("table4-mr".getBytes(), 1); + HTableInterface htable = createDNRIOEScannerTable("table4-mr".getBytes(), 1); runTestMapreduce(htable); } @@ -381,7 +382,7 @@ public class TestTableInputFormat { @Test(expected = org.apache.hadoop.hbase.DoNotRetryIOException.class) public void testTableRecordReaderScannerTimeoutMapreduceTwice() throws IOException, InterruptedException { - HTable htable = createDNRIOEScannerTable("table5-mr".getBytes(), 2); + HTableInterface htable = createDNRIOEScannerTable("table5-mr".getBytes(), 2); runTestMapreduce(htable); } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java index 218e73b..b047219 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java @@ -28,6 +28,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import 
org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -72,7 +73,7 @@ public class TestTableMapReduce extends TestTableMapReduceBase { } @Override - protected void runTestOnTable(HTable table) throws IOException { + protected void runTestOnTable(HTableInterface table) throws IOException { JobConf jobConf = null; try { LOG.info("Before map/reduce startup"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java index 99fc48d..0650546 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java @@ -35,6 +35,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -61,7 +62,7 @@ public class TestTableMapReduceUtil { private static final Log LOG = LogFactory .getLog(TestTableMapReduceUtil.class); - private static HTable presidentsTable; + private static HTableInterface presidentsTable; private static final String TABLE_NAME = "People"; private static final byte[] COLUMN_FAMILY = Bytes.toBytes("info"); @@ -110,7 +111,7 @@ public class TestTableMapReduceUtil { return table; } - private static void createPutCommand(HTable table) throws IOException { + private static void createPutCommand(HTableInterface table) throws IOException { for (String president : 
presidentsRowKeys) { if (presidentNames.hasNext()) { Put p = new Put(Bytes.toBytes(president)); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java index 18b9864..3f3cfdd 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; @@ -153,7 +154,7 @@ public abstract class TableSnapshotInputFormatTestBase { Admin admin = util.getHBaseAdmin(); // put some stuff in the table - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); util.loadTable(table, FAMILIES); Path rootDir = FSUtils.getRootDir(util.getConfiguration()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java index 1ccc57d..6b53499 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import 
org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LauncherSecurityManager; @@ -80,7 +81,7 @@ public class TestCellCounter { public void testCellCounter() throws Exception { String sourceTable = "sourceTable"; byte[][] families = { FAMILY_A, FAMILY_B }; - HTable t = UTIL.createTable(Bytes.toBytes(sourceTable), families); + HTableInterface t = UTIL.createTable(Bytes.toBytes(sourceTable), families); try{ Put p = new Put(ROW1); p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11")); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java index 6163bb9..1b8c0a5 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; @@ -84,8 +85,8 @@ public class TestCopyTable { final byte[] FAMILY = Bytes.toBytes("family"); final byte[] COLUMN1 = Bytes.toBytes("c1"); - HTable t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); - HTable t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY); + HTableInterface t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); + HTableInterface t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY); // put rows into the first table for (int i = 0; i < 10; i++) { @@ -125,8 +126,8 @@ public class TestCopyTable { final byte[] ROW1 = Bytes.toBytes("row1"); final byte[] ROW2 = Bytes.toBytes("row2"); - HTable t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); - HTable t2 = TEST_UTIL.createTable(TABLENAME2, 
FAMILY); + HTableInterface t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); + HTableInterface t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY); // put rows into the first table Put p = new Put(ROW0); @@ -176,8 +177,8 @@ public class TestCopyTable { byte[][] families = { FAMILY_A, FAMILY_B }; - HTable t = TEST_UTIL.createTable(Bytes.toBytes(sourceTable), families); - HTable t2 = TEST_UTIL.createTable(Bytes.toBytes(targetTable), families); + HTableInterface t = TEST_UTIL.createTable(Bytes.toBytes(sourceTable), families); + HTableInterface t2 = TEST_UTIL.createTable(Bytes.toBytes(targetTable), families); Put p = new Put(ROW1); p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11")); p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12")); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java index a46660e..9ce7bbc 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java @@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -490,7 +491,7 @@ public class TestHFileOutputFormat { Configuration conf = new Configuration(this.util.getConfiguration()); Map familyToCompression = getMockColumnFamiliesForCompression(numCfs); - HTable table = Mockito.mock(HTable.class); + HTableInterface table = Mockito.mock(HTable.class); setupMockColumnFamiliesForCompression(table, familyToCompression); HFileOutputFormat.configureCompression(table, conf); @@ -508,7 +509,7 @@ public class 
TestHFileOutputFormat { } } - private void setupMockColumnFamiliesForCompression(HTable table, + private void setupMockColumnFamiliesForCompression(HTableInterface table, Map familyToCompression) throws IOException { HTableDescriptor mockTableDescriptor = new HTableDescriptor(TABLE_NAME); for (Entry entry : familyToCompression.entrySet()) { @@ -560,7 +561,7 @@ public class TestHFileOutputFormat { Configuration conf = new Configuration(this.util.getConfiguration()); Map familyToBloomType = getMockColumnFamiliesForBloomType(numCfs); - HTable table = Mockito.mock(HTable.class); + HTableInterface table = Mockito.mock(HTable.class); setupMockColumnFamiliesForBloomType(table, familyToBloomType); HFileOutputFormat.configureBloomType(table, conf); @@ -581,7 +582,7 @@ public class TestHFileOutputFormat { } } - private void setupMockColumnFamiliesForBloomType(HTable table, + private void setupMockColumnFamiliesForBloomType(HTableInterface table, Map familyToDataBlockEncoding) throws IOException { HTableDescriptor mockTableDescriptor = new HTableDescriptor(TABLE_NAME); for (Entry entry : familyToDataBlockEncoding.entrySet()) { @@ -631,7 +632,7 @@ public class TestHFileOutputFormat { Configuration conf = new Configuration(this.util.getConfiguration()); Map familyToBlockSize = getMockColumnFamiliesForBlockSize(numCfs); - HTable table = Mockito.mock(HTable.class); + HTableInterface table = Mockito.mock(HTable.class); setupMockColumnFamiliesForBlockSize(table, familyToBlockSize); HFileOutputFormat.configureBlockSize(table, conf); @@ -653,7 +654,7 @@ public class TestHFileOutputFormat { } } - private void setupMockColumnFamiliesForBlockSize(HTable table, + private void setupMockColumnFamiliesForBlockSize(HTableInterface table, Map familyToDataBlockEncoding) throws IOException { HTableDescriptor mockTableDescriptor = new HTableDescriptor(TABLE_NAME); for (Entry entry : familyToDataBlockEncoding.entrySet()) { @@ -707,7 +708,7 @@ public class TestHFileOutputFormat { Configuration 
conf = new Configuration(this.util.getConfiguration()); Map familyToDataBlockEncoding = getMockColumnFamiliesForDataBlockEncoding(numCfs); - HTable table = Mockito.mock(HTable.class); + HTableInterface table = Mockito.mock(HTable.class); setupMockColumnFamiliesForDataBlockEncoding(table, familyToDataBlockEncoding); HFileOutputFormat.configureDataBlockEncoding(table, conf); @@ -728,7 +729,7 @@ public class TestHFileOutputFormat { } } - private void setupMockColumnFamiliesForDataBlockEncoding(HTable table, + private void setupMockColumnFamiliesForDataBlockEncoding(HTableInterface table, Map familyToDataBlockEncoding) throws IOException { HTableDescriptor mockTableDescriptor = new HTableDescriptor(TABLE_NAME); for (Entry entry : familyToDataBlockEncoding.entrySet()) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 7852fc5..713d152 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -159,7 +160,7 @@ public class TestImportExport { @Test public void testSimpleCase() throws Exception { String EXPORT_TABLE = "exportSimpleCase"; - HTable t = UTIL.createTable(Bytes.toBytes(EXPORT_TABLE), FAMILYA, 3); + HTableInterface t = UTIL.createTable(Bytes.toBytes(EXPORT_TABLE), FAMILYA, 3); Put p = new Put(ROW1); p.add(FAMILYA, QUAL, now, QUAL); p.add(FAMILYA, QUAL, now+1, QUAL); @@ -222,7 +223,7 @@ public class TestImportExport 
{ fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + "exportedTableIn94Format")); String IMPORT_TABLE = "importTableExportedFrom94"; - HTable t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), Bytes.toBytes("f1"), 3); + HTableInterface t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), Bytes.toBytes("f1"), 3); String[] args = new String[] { "-Dhbase.import.version=0.94" , IMPORT_TABLE, FQ_OUTPUT_DIR @@ -252,7 +253,7 @@ public class TestImportExport { .setMaxVersions(1) ); UTIL.getHBaseAdmin().createTable(desc); - HTable t = new HTable(UTIL.getConfiguration(), BATCH_TABLE); + HTableInterface t = new HTable(UTIL.getConfiguration(), BATCH_TABLE); Put p = new Put(ROW1); p.add(FAMILYA, QUAL, now, QUAL); @@ -283,7 +284,7 @@ public class TestImportExport { .setKeepDeletedCells(true) ); UTIL.getHBaseAdmin().createTable(desc); - HTable t = new HTable(UTIL.getConfiguration(), EXPORT_TABLE); + HTableInterface t = new HTable(UTIL.getConfiguration(), EXPORT_TABLE); Put p = new Put(ROW1); p.add(FAMILYA, QUAL, now, QUAL); @@ -349,7 +350,7 @@ public class TestImportExport { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE)); desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5)); UTIL.getHBaseAdmin().createTable(desc); - HTable exportTable = new HTable(UTIL.getConfiguration(), EXPORT_TABLE); + HTableInterface exportTable = new HTable(UTIL.getConfiguration(), EXPORT_TABLE); Put p = new Put(ROW1); p.add(FAMILYA, QUAL, now, QUAL); @@ -376,7 +377,7 @@ public class TestImportExport { desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5)); UTIL.getHBaseAdmin().createTable(desc); - HTable importTable = new HTable(UTIL.getConfiguration(), IMPORT_TABLE); + HTableInterface importTable = new HTable(UTIL.getConfiguration(), IMPORT_TABLE); args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(), "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, FQ_OUTPUT_DIR, 
"1000" }; @@ -410,7 +411,7 @@ public class TestImportExport { * @return * @throws IOException */ - private int getCount(HTable table, Filter filter) throws IOException { + private int getCount(HTableInterface table, Filter filter) throws IOException { Scan scan = new Scan(); scan.setFilter(filter); ResultScanner results = table.getScanner(scan); @@ -542,7 +543,7 @@ public class TestImportExport { public void testDurability() throws IOException, InterruptedException, ClassNotFoundException { // Create an export table. String exportTableName = "exporttestDurability"; - HTable exportTable = UTIL.createTable(Bytes.toBytes(exportTableName), FAMILYA, 3); + HTableInterface exportTable = UTIL.createTable(Bytes.toBytes(exportTableName), FAMILYA, 3); // Insert some data Put put = new Put(ROW1); @@ -563,7 +564,7 @@ public class TestImportExport { // Create the table for import String importTableName = "importTestDurability1"; - HTable importTable = UTIL.createTable(Bytes.toBytes(importTableName), FAMILYA, 3); + HTableInterface importTable = UTIL.createTable(Bytes.toBytes(importTableName), FAMILYA, 3); // Register the hlog listener for the import table TableWALActionListener walListener = new TableWALActionListener(importTableName); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java index 27f4acb..a0b2e94 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import 
org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -192,7 +193,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable { int valueMultiplier, boolean dataAvailable) throws IOException { LOG.debug("Validating table."); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); boolean verified = false; long pause = conf.getLong("hbase.client.pause", 5 * 1000); int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java index 0c483aa..82f5191 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -181,7 +182,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { private void issueDeleteAndVerifyData(String tableName) throws IOException { LOG.debug("Validating table after delete."); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); boolean verified = false; long pause = conf.getLong("hbase.client.pause", 5 * 1000); int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); @@ -365,7 +366,7 @@ public class TestImportTSVWithVisibilityLabels implements 
Configurable { int valueMultiplier) throws IOException { LOG.debug("Validating table."); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); boolean verified = false; long pause = conf.getLong("hbase.client.pause", 5 * 1000); int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index 075c0ab..e0232bd 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -302,7 +303,7 @@ public class TestImportTsv implements Configurable { String family, int valueMultiplier) throws IOException { LOG.debug("Validating table."); - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); boolean verified = false; long pause = conf.getLong("hbase.client.pause", 5 * 1000); int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java index 27c809a..04d6976 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java @@ -37,6 +37,7 @@ import 
org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; @@ -249,7 +250,7 @@ public class TestLoadIncrementalHFiles { String [] args= {dir.toString(), tableName.toString()}; loader.run(args); - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); try { assertEquals(expectedRows, util.countRows(table)); } finally { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java index 491c2c6..07dfb56 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -229,7 +230,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { * @throws IOException */ void assertExpectedTable(String table, int count, int value) throws IOException { - HTable t = null; + HTableInterface t = null; try { assertEquals(util.getHBaseAdmin().listTables(table).length, 1); t = new HTable(util.getConfiguration(), table); @@ -341,7 +342,7 @@ public 
class TestLoadIncrementalHFilesSplitRecovery { LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles( util.getConfiguration()) { - protected void bulkLoadPhase(final HTable htable, final HConnection conn, + protected void bulkLoadPhase(final HTableInterface htable, final HConnection conn, ExecutorService pool, Deque queue, final Multimap regionGroups) throws IOException { int i = attemptedCalls.incrementAndGet(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java index 34bca43..d79136f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java @@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -126,7 +127,7 @@ public class TestMultithreadedTableMapper { MULTI_REGION_TABLE_NAME)); } - private void runTestOnTable(HTable table) + private void runTestOnTable(HTableInterface table) throws IOException, InterruptedException, ClassNotFoundException { Job job = null; try { @@ -160,7 +161,7 @@ public class TestMultithreadedTableMapper { } private void verify(String tableName) throws IOException { - HTable table = new HTable(new Configuration(UTIL.getConfiguration()), tableName); + HTableInterface table = new HTable(new Configuration(UTIL.getConfiguration()), tableName); boolean verified = false; long pause = UTIL.getConfiguration().getLong("hbase.client.pause", 5 * 1000); int numRetries = UTIL.getConfiguration().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); @@ 
-193,7 +194,7 @@ public class TestMultithreadedTableMapper { * @throws IOException * @throws NullPointerException if we failed to find a cell value */ - private void verifyAttempt(final HTable table) + private void verifyAttempt(final HTableInterface table) throws IOException, NullPointerException { Scan scan = new Scan(); scan.addFamily(INPUT_FAMILY); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java index f5bbddc..b65aa0c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java @@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper; import org.apache.hadoop.hbase.util.Bytes; @@ -67,7 +68,7 @@ public class TestRowCounter { public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); TEST_UTIL.startMiniMapReduceCluster(); - HTable table = TEST_UTIL.createTable(Bytes.toBytes(TABLE_NAME), + HTableInterface table = TEST_UTIL.createTable(Bytes.toBytes(TABLE_NAME), Bytes.toBytes(COL_FAM)); writeRows(table); table.close(); @@ -164,7 +165,7 @@ public class TestRowCounter { * @param table * @throws IOException */ - private static void writeRows(HTable table) throws IOException { + private static void writeRows(HTableInterface table) throws IOException { final byte[] family = Bytes.toBytes(COL_FAM); final byte[] value = Bytes.toBytes("abcd"); final byte[] col1 = Bytes.toBytes(COL1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java 
hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java index 11a54d4..2ba82fd 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -100,7 +101,7 @@ public class TestTableMapReduce extends TestTableMapReduceBase { } } - protected void runTestOnTable(HTable table) throws IOException { + protected void runTestOnTable(HTableInterface table) throws IOException { Job job = null; try { LOG.info("Before map/reduce startup"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java index 8a45f14..2da4fd1 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -69,7 +70,7 @@ public abstract class TestTableMapReduceBase { /** * Handles API-specifics for setting up and executing the job. 
*/ - protected abstract void runTestOnTable(HTable table) throws IOException; + protected abstract void runTestOnTable(HTableInterface table) throws IOException; @BeforeClass public static void beforeClass() throws Exception { @@ -132,7 +133,7 @@ public abstract class TestTableMapReduceBase { } protected void verify(String tableName) throws IOException { - HTable table = new HTable(UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(UTIL.getConfiguration(), tableName); boolean verified = false; long pause = UTIL.getConfiguration().getLong("hbase.client.pause", 5 * 1000); int numRetries = UTIL.getConfiguration().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5); @@ -163,7 +164,7 @@ public abstract class TestTableMapReduceBase { * @throws IOException * @throws NullPointerException if we failed to find a cell value */ - private void verifyAttempt(final HTable table) throws IOException, NullPointerException { + private void verifyAttempt(final HTableInterface table) throws IOException, NullPointerException { Scan scan = new Scan(); TableInputFormat.addColumns(scan, columns); ResultScanner scanner = table.getScanner(scan); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java index a5c0b92..c5486da 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -103,7 +104,7 @@ public class TestTimeRangeMapRed { 
implements Configurable { private Configuration conf = null; - private HTable table = null; + private HTableInterface table = null; @Override public void map(ImmutableBytesWritable key, Result result, @@ -147,13 +148,13 @@ public class TestTimeRangeMapRed { col.setMaxVersions(Integer.MAX_VALUE); desc.addFamily(col); admin.createTable(desc); - HTable table = new HTable(UTIL.getConfiguration(), desc.getTableName()); + HTableInterface table = new HTable(UTIL.getConfiguration(), desc.getTableName()); prepareTest(table); runTestOnTable(); verify(table); } - private void prepareTest(final HTable table) throws IOException { + private void prepareTest(final HTableInterface table) throws IOException { for (Map.Entry entry : TIMESTAMP.entrySet()) { Put put = new Put(KEY); put.setDurability(Durability.SKIP_WAL); @@ -190,7 +191,7 @@ public class TestTimeRangeMapRed { } } - private void verify(final HTable table) throws IOException { + private void verify(final HTableInterface table) throws IOException { Scan scan = new Scan(); scan.addColumn(FAMILY_NAME, COLUMN_NAME); scan.setMaxVersions(1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index 309af73..1b7dbd4 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -91,8 +92,8 @@ public class TestWALPlayer { final byte[] COLUMN1 = Bytes.toBytes("c1"); final byte[] COLUMN2 = 
Bytes.toBytes("c2"); final byte[] ROW = Bytes.toBytes("row"); - HTable t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); - HTable t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY); + HTableInterface t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY); + HTableInterface t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY); // put a row into the first table Put p = new Put(ROW); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java index 39d1bab..6eca033 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.util.Bytes; @@ -201,7 +202,7 @@ public class TestAssignmentListener { assertEquals(0, listener.getCloseCount()); // Add some data - HTable table = new HTable(TEST_UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), TABLE_NAME); try { for (int i = 0; i < 10; ++i) { byte[] key = Bytes.toBytes("row-" + i); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java index 989b9f9..754ffc4 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import 
org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.ObserverContext; @@ -121,7 +122,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -165,7 +166,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); final HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -379,7 +380,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -428,7 +429,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); 
MetaTableAccessor.addRegionToMeta(meta, hri); @@ -472,7 +473,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -514,7 +515,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -612,7 +613,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -668,7 +669,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -741,7 +742,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( 
desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -816,7 +817,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -857,7 +858,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); @@ -936,7 +937,7 @@ public class TestAssignmentManagerOnCluster { desc.addFamily(new HColumnDescriptor(FAMILY)); admin.createTable(desc); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HRegionInfo hri = new HRegionInfo( desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z")); MetaTableAccessor.addRegionToMeta(meta, hri); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java index 87c34be..3e96fe0 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java @@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.client.ConnectionUtils; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import 
org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.NonceGenerator; import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator; @@ -276,7 +277,7 @@ public class TestDistributedLogSplitting { master.balanceSwitch(false); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); HRegionServer hrs = findRSToKill(false, "table"); List regions = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices()); @@ -374,7 +375,7 @@ public class TestDistributedLogSplitting { master.balanceSwitch(false); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); HRegionServer hrs = findRSToKill(true, "table"); List regions = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices()); @@ -385,9 +386,8 @@ public class TestDistributedLogSplitting { zkw.close(); } - private void abortRSAndVerifyRecovery(HRegionServer hrs, HTable ht, final ZooKeeperWatcher zkw, - final int numRegions, final int numofLines) throws Exception { - + private void abortRSAndVerifyRecovery(HRegionServer hrs, HTableInterface ht, + final ZooKeeperWatcher zkw, final int numRegions, final int numofLines) throws Exception { abortRSAndWaitForRecovery(hrs, zkw, numRegions); assertEquals(numofLines, TEST_UTIL.countRows(ht)); } @@ -442,7 +442,7 @@ public class TestDistributedLogSplitting { master.balanceSwitch(false); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); HRegionServer hrs = 
findRSToKill(false, "table"); List regions = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices()); @@ -499,7 +499,7 @@ public class TestDistributedLogSplitting { master.balanceSwitch(false); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); HRegionServer hrs = findRSToKill(false, "table"); List regions = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices()); @@ -557,7 +557,7 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); List regions = null; HRegionServer hrs1 = findRSToKill(false, "table"); @@ -634,7 +634,7 @@ public class TestDistributedLogSplitting { master.balanceSwitch(false); List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = master.getZooKeeper(); - HTable ht = installTable(zkw, "table", "family", 40); + HTableInterface ht = installTable(zkw, "table", "family", 40); final SplitLogManager slm = master.getMasterFileSystem().splitLogManager; Set regionSet = new HashSet(); @@ -684,7 +684,7 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); List regions = null; HRegionServer hrs = null; @@ -727,8 +727,10 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - 
HTable disablingHT = installTable(zkw, "disableTable", "family", NUM_REGIONS_TO_CREATE); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE, NUM_REGIONS_TO_CREATE); + HTableInterface disablingHT = + installTable(zkw, "disableTable", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = + installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE, NUM_REGIONS_TO_CREATE); // turn off load balancing to prevent regions from moving around otherwise // they will consume recovered.edits @@ -865,7 +867,7 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); final SplitLogManager slm = master.getMasterFileSystem().splitLogManager; Set regionSet = new HashSet(); @@ -1004,7 +1006,7 @@ public class TestDistributedLogSplitting { final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "distributed log splitting test", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); populateDataInTable(NUM_ROWS_PER_REGION, "family"); @@ -1178,7 +1180,7 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); List regions = null; HRegionServer hrs = null; @@ -1270,7 +1272,7 @@ public class TestDistributedLogSplitting { List rsts = cluster.getLiveRegionServerThreads(); final ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "table-creation", null); - HTable ht = installTable(zkw, "table", "family", 
NUM_REGIONS_TO_CREATE); + HTableInterface ht = installTable(zkw, "table", "family", NUM_REGIONS_TO_CREATE); List regions = null; HRegionServer hrs = null; @@ -1526,7 +1528,8 @@ public class TestDistributedLogSplitting { /** * Load table with puts and deletes with expected values so that we can verify later */ - private void prepareData(final HTable t, final byte[] f, final byte[] column) throws IOException { + private void prepareData(final HTableInterface t, final byte[] f, final byte[] column) + throws IOException { t.setAutoFlush(false, true); byte[] k = new byte[3]; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java index 6bfb618..dbf3c76 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; @@ -68,7 +69,7 @@ public class TestMaster { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster m = cluster.getMaster(); - HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILYNAME); + HTableInterface ht = TEST_UTIL.createTable(TABLENAME, FAMILYNAME); assertTrue(m.assignmentManager.getTableStateManager().isTableState(TABLENAME, ZooKeeperProtos.Table.State.ENABLED)); TEST_UTIL.loadTable(ht, FAMILYNAME, false); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java index 117c515..642385b 
100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; @@ -252,7 +253,7 @@ public class TestMasterOperationsForRegionReplicas { admin.disableTable(table); // now delete one replica info from all the rows // this is to make the meta appear to be only partially updated - HTable metaTable = new HTable(TableName.META_TABLE_NAME, admin.getConnection()); + HTableInterface metaTable = new HTable(TableName.META_TABLE_NAME, admin.getConnection()); for (byte[] row : tableRows) { Delete deleteOneReplicaLocation = new Delete(row); deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY, diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java index 7ed455e..dfa3629 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.hadoop.hbase.util.Bytes; import 
org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java index c7d95ad..8bf8d81 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -479,8 +480,8 @@ public class TestMasterTransitions { * @throws IOException */ private static int addToEachStartKey(final int expected) throws IOException { - HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); - HTable meta = new HTable(TEST_UTIL.getConfiguration(), + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); + HTableInterface meta = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); int rows = 0; Scan scan = new Scan(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java index e684d1e..f4158d4 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.TableDescriptors; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; 
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -225,7 +226,7 @@ public class TestZKBasedOpenCloseRegion { private static void waitUntilAllRegionsAssigned() throws IOException { - HTable meta = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); while (true) { int rows = 0; Scan scan = new Scan(); @@ -258,8 +259,8 @@ public class TestZKBasedOpenCloseRegion { * @throws IOException */ private static int addToEachStartKey(final int expected) throws IOException { - HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); - HTable meta = new HTable(TEST_UTIL.getConfiguration(), + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); + HTableInterface meta = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); int rows = 0; Scan scan = new Scan(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java index 97182df..f649ae2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.junit.AfterClass; @@ -66,7 +67,7 @@ public class TestTableDeleteFamilyHandler { // Create a table of three families. This will assign a region. 
TEST_UTIL.createTable(TABLENAME, FAMILIES); - HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME); while(TEST_UTIL.getMiniHBaseCluster().getMaster().getAssignmentManager() .getRegionStates().getRegionsInTransition().size() > 0) { Thread.sleep(100); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java index fe992b6..72db2fc 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -145,7 +146,7 @@ public class TestNamespaceUpgrade { assertEquals(2, TEST_UTIL.getHBaseAdmin().listNamespaceDescriptors().length); //verify ACL table is migrated - HTable secureTable = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface secureTable = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); ResultScanner scanner = secureTable.getScanner(new Scan()); int count = 0; for(Result r : scanner) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java index 807751a..f110cc8 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java @@ -33,6 +33,7 @@ import 
org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.util.Bytes; @@ -85,7 +86,7 @@ public class TestCompactionState { byte [] fakecf = Bytes.toBytes("fakecf"); boolean caughtMinorCompact = false; boolean caughtMajorCompact = false; - HTable ht = null; + HTableInterface ht = null; try { ht = TEST_UTIL.createTable(table, family); HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration()); @@ -128,7 +129,7 @@ public class TestCompactionState { byte [] family = Bytes.toBytes("family"); byte [][] families = {family, Bytes.add(family, Bytes.toBytes("2")), Bytes.add(family, Bytes.toBytes("3"))}; - HTable ht = null; + HTableInterface ht = null; try { ht = TEST_UTIL.createTable(table, families); loadData(ht, families, 3000, flushes); @@ -213,7 +214,7 @@ public class TestCompactionState { return count; } - private static void loadData(final HTable ht, final byte[][] families, + private static void loadData(final HTableInterface ht, final byte[][] families, final int rows, final int flushes) throws IOException { List puts = new ArrayList(rows); byte[] qualifier = Bytes.toBytes("val"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java index eabf75f..5827841 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import 
org.apache.hadoop.hbase.Waiter.Predicate; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting; @@ -206,7 +207,7 @@ public class TestEncryptionKeyRotation { TEST_UTIL.getHBaseAdmin().createTable(htd); TEST_UTIL.waitTableAvailable(htd.getName(), 5000); // Create a store file - HTable table = new HTable(conf, htd.getName()); + HTableInterface table = new HTable(conf, htd.getName()); try { table.put(new Put(Bytes.toBytes("testrow")) .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value"))); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java index 8dc7143..5ef189a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting; @@ -101,7 +102,7 @@ public class TestEncryptionRandomKeying { TEST_UTIL.waitTableAvailable(htd.getName(), 5000); // Create a store file - HTable table = new HTable(conf, htd.getName()); + HTableInterface table = new HTable(conf, htd.getName()); try { table.put(new Put(Bytes.toBytes("testrow")) .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value"))); diff --git 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java index 8de605d..2b0a200 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.Stoppable; @@ -95,7 +96,7 @@ public class TestEndToEndSplitTransaction { TableName tableName = TableName.valueOf("TestSplit"); byte[] familyName = Bytes.toBytes("fam"); - HTable ht = TEST_UTIL.createTable(tableName, familyName); + HTableInterface ht = TEST_UTIL.createTable(tableName, familyName); TEST_UTIL.loadTable(ht, familyName, false); ht.close(); HRegionServer server = TEST_UTIL.getHBaseCluster().getRegionServer(0); @@ -438,7 +439,7 @@ public class TestEndToEndSplitTransaction { long start = System.currentTimeMillis(); log("blocking until region is split:" + Bytes.toStringBinary(regionName)); HRegionInfo daughterA = null, daughterB = null; - HTable metaTable = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface metaTable = new HTable(conf, TableName.META_TABLE_NAME); try { while (System.currentTimeMillis() - start < timeout) { @@ -477,13 +478,14 @@ public class TestEndToEndSplitTransaction { } } - public static Result getRegionRow(HTable metaTable, byte[] regionName) throws IOException { + public static Result getRegionRow(HTableInterface metaTable, byte[] regionName) + throws IOException { Get get = new Get(regionName); return 
metaTable.get(get); } - public static void blockUntilRegionIsInMeta(HTable metaTable, long timeout, HRegionInfo hri) - throws IOException, InterruptedException { + public static void blockUntilRegionIsInMeta(HTableInterface metaTable, long timeout, + HRegionInfo hri) throws IOException, InterruptedException { log("blocking until region is in META: " + hri.getRegionNameAsString()); long start = System.currentTimeMillis(); while (System.currentTimeMillis() - start < timeout) { @@ -503,7 +505,7 @@ public class TestEndToEndSplitTransaction { throws IOException, InterruptedException { log("blocking until region is opened for reading:" + hri.getRegionNameAsString()); long start = System.currentTimeMillis(); - HTable table = new HTable(conf, hri.getTable()); + HTableInterface table = new HTable(conf, hri.getTable()); try { byte [] row = hri.getStartKey(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index 9c9ec70..e0181a0 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileContext; @@ -199,7 +200,7 @@ public class TestFSErrorsExposed { util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); // Make a new Configuration so it makes a new connection that has the // above configuration on it; else we use the old one w/ 10 as default. 
- HTable table = new HTable(new Configuration(util.getConfiguration()), tableName); + HTableInterface table = new HTable(new Configuration(util.getConfiguration()), tableName); // Load some data util.loadTable(table, fam, false); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 3b98533..9e698a2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -93,6 +93,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -4039,7 +4040,7 @@ public class TestHRegion { try { cluster = htu.startMiniCluster(1, regionServersCount, dataNodeHosts); byte[][] families = { fam1, fam2 }; - HTable ht = htu.createTable(Bytes.toBytes(this.getName()), families); + HTableInterface ht = htu.createTable(Bytes.toBytes(this.getName()), families); // Setting up region byte row[] = Bytes.toBytes("row1"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java index 2c4ac95..7a9df8e 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import 
org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -133,7 +134,7 @@ public class TestHRegionOnCluster { } } - private void putDataAndVerify(HTable table, String row, byte[] family, + private void putDataAndVerify(HTableInterface table, String row, byte[] family, String value, int verifyNum) throws IOException { System.out.println("=========Putting data :" + row); Put put = new Put(Bytes.toBytes(row)); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index 26570d1..a15b5fa 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.RegionServerCallable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -205,7 +206,7 @@ public class TestHRegionServerBulkLoad { */ public static class AtomicScanReader extends RepeatingTestThread { byte targetFamilies[][]; - HTable table; + HTableInterface table; AtomicLong numScans = new AtomicLong(); AtomicLong numRowsScanned = new AtomicLong(); String TABLE_NAME; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index 8f485cb..7447eb7 100644 --- 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -100,7 +101,7 @@ public class TestJoinedScanners { desc.addFamily(hcd); } htu.getHBaseAdmin().createTable(desc); - HTable ht = new HTable(htu.getConfiguration(), tableName); + HTableInterface ht = new HTable(htu.getConfiguration(), tableName); long rows_to_insert = 1000; int insert_batch = 20; @@ -150,7 +151,7 @@ public class TestJoinedScanners { } } - private void runScanner(HTable table, boolean slow) throws Exception { + private void runScanner(HTableInterface table, boolean slow) throws Exception { long time = System.nanoTime(); Scan scan = new Scan(); scan.addColumn(cf_essential, col_name); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java index d9883e0..506646f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import 
org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.Result; @@ -122,7 +123,7 @@ public class TestRegionMergeTransactionOnCluster { TableName.valueOf("testWholesomeMerge"); // Create table and load data. - HTable table = createTableAndLoadData(master, tableName); + HTableInterface table = createTableAndLoadData(master, tableName); // Merge 1st and 2nd region mergeRegionsAndVerifyRegionNum(master, tableName, 0, 1, INITIAL_REGION_NUM - 1); @@ -170,7 +171,7 @@ public class TestRegionMergeTransactionOnCluster { final TableName tableName = TableName.valueOf("testCleanMergeReference"); // Create table and load data. - HTable table = createTableAndLoadData(master, tableName); + HTableInterface table = createTableAndLoadData(master, tableName); // Merge 1st and 2nd region mergeRegionsAndVerifyRegionNum(master, tableName, 0, 1, INITIAL_REGION_NUM - 1); @@ -251,7 +252,7 @@ public class TestRegionMergeTransactionOnCluster { try { // Create table and load data. - HTable table = createTableAndLoadData(master, tableName); + HTableInterface table = createTableAndLoadData(master, tableName); RegionStates regionStates = master.getAssignmentManager().getRegionStates(); List regions = regionStates.getRegionsOfTable(tableName); // Fake offline one region @@ -421,7 +422,7 @@ public class TestRegionMergeTransactionOnCluster { return ret; } - private void loadData(HTable table) throws IOException { + private void loadData(HTableInterface table) throws IOException { for (int i = 0; i < ROWSIZE; i++) { Put put = new Put(ROWS[i]); put.add(FAMILYNAME, QUALIFIER, Bytes.toBytes(i)); @@ -429,7 +430,7 @@ public class TestRegionMergeTransactionOnCluster { } } - private void verifyRowCount(HTable table, int expectedRegionNum) + private void verifyRowCount(HTableInterface table, int expectedRegionNum) throws IOException { ResultScanner scanner = table.getScanner(new Scan()); int rowCount = 0; diff --git 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java index 9f64a7c..417be5f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TestMetaTableAccessor; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -181,7 +182,7 @@ public class TestRegionReplicas { @Test(timeout = 60000) public void testRegionReplicaUpdatesMetaLocation() throws Exception { openRegion(hriSecondary); - HTable meta = null; + HTableInterface meta = null; try { meta = new HTable(HTU.getConfiguration(), TableName.META_TABLE_NAME); TestMetaTableAccessor.assertMetaLocation(meta, hriPrimary.getRegionName() diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java index 99ea553..31782b4 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java @@ -189,7 +189,7 @@ public class TestRegionServerMetrics { TEST_UTIL.createTable(tableName, cf); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); Put p = new Put(row); p.add(cf, qualifier, val); @@ -221,7 +221,7 @@ public class TestRegionServerMetrics { TEST_UTIL.createTable(tableName, cf); //Force a hfile. 
- HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); Put p = new Put(row); p.add(cf, qualifier, val); t.put(p); @@ -247,7 +247,7 @@ public class TestRegionServerMetrics { byte[] valThree = Bytes.toBytes("ValueThree"); TEST_UTIL.createTable(tableName, cf); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); Put p = new Put(row); p.add(cf, qualifier, valOne); t.put(p); @@ -282,7 +282,7 @@ public class TestRegionServerMetrics { TEST_UTIL.createTable(tableName, cf); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); Put p = new Put(row); p.add(cf, qualifier, val); @@ -314,7 +314,7 @@ public class TestRegionServerMetrics { TEST_UTIL.createTable(tableName, cf); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); Put p = new Put(row); p.add(cf, qualifier, val); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java index c453723..b1ea566 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -66,7 +67,7 @@ public class TestSCVFWithMiniCluster { private static final byte[] QUALIFIER_FOO = Bytes.toBytes("foo"); private static final byte[] QUALIFIER_BAR = Bytes.toBytes("bar"); - private static HTable htable; + private static 
HTableInterface htable; private static Filter scanFilter; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java index bfebaa2..d428aca 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; @@ -181,7 +182,7 @@ public class TestServerCustomProtocol { @Test public void testSingleProxy() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); Map results = ping(table, null, null); // There are three regions so should get back three results. 
assertEquals(3, results.size()); @@ -231,7 +232,7 @@ public class TestServerCustomProtocol { table.close(); } - private Map hello(final HTable table, final String send, final String response) + private Map hello(final HTableInterface table, final String send, final String response) throws ServiceException, Throwable { Map results = hello(table, send); for (Map.Entry e: results.entrySet()) { @@ -240,14 +241,13 @@ public class TestServerCustomProtocol { return results; } - private Map hello(final HTable table, final String send) + private Map hello(final HTableInterface table, final String send) throws ServiceException, Throwable { return hello(table, send, null, null); } - private Map hello(final HTable table, final String send, final byte [] start, - final byte [] end) - throws ServiceException, Throwable { + private Map hello(final HTableInterface table, final String send, + final byte[] start, final byte[] end) throws ServiceException, Throwable { return table.coprocessorService(PingProtos.PingService.class, start, end, new Batch.Call() { @@ -264,9 +264,8 @@ public class TestServerCustomProtocol { }); } - private Map compoundOfHelloAndPing(final HTable table, final byte [] start, - final byte [] end) - throws ServiceException, Throwable { + private Map compoundOfHelloAndPing(final HTableInterface table, + final byte[] start, final byte[] end) throws ServiceException, Throwable { return table.coprocessorService(PingProtos.PingService.class, start, end, new Batch.Call() { @@ -284,7 +283,7 @@ public class TestServerCustomProtocol { }); } - private Map noop(final HTable table, final byte [] start, + private Map noop(final HTableInterface table, final byte [] start, final byte [] end) throws ServiceException, Throwable { return table.coprocessorService(PingProtos.PingService.class, start, end, @@ -391,8 +390,8 @@ public class TestServerCustomProtocol { table.close(); } - private Map ping(final HTable table, final byte [] start, final byte [] end) - throws 
ServiceException, Throwable { + private Map ping(final HTableInterface table, final byte[] start, + final byte[] end) throws ServiceException, Throwable { return table.coprocessorService(PingProtos.PingService.class, start, end, new Batch.Call() { @Override @@ -439,7 +438,7 @@ public class TestServerCustomProtocol { @Test public void testEmptyReturnType() throws Throwable { - HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + HTableInterface table = new HTable(util.getConfiguration(), TEST_TABLE); Map results = noop(table, ROW_A, ROW_C); assertEquals("Should have results from three regions", 3, results.size()); // all results should be null diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java index da4b3d9..0618a47 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java @@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -194,7 +195,7 @@ public class TestSplitTransactionOnCluster { try { // Create table then get the single region for our new table. 
- HTable t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); + HTableInterface t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); final List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); int regionServerIndex = cluster.getServerWith(regions.get(0).getRegionName()); @@ -281,7 +282,7 @@ public class TestSplitTransactionOnCluster { TableName.valueOf("testRITStateForRollback"); try { // Create table then get the single region for our new table. - HTable t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); + HTableInterface t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); final List regions = cluster.getRegions(tableName); final HRegionInfo hri = getAndCheckSingleTableRegion(regions); insertData(tableName.getName(), admin, t); @@ -342,8 +343,8 @@ public class TestSplitTransactionOnCluster { Store store = region.getStore(cf); int regionServerIndex = cluster.getServerWith(region.getRegionName()); HRegionServer regionServer = cluster.getRegionServer(regionServerIndex); - - HTable t = new HTable(conf, tableName); + + HTableInterface t = new HTable(conf, tableName); // insert data insertData(tableName, admin, t); insertData(tableName, admin, t); @@ -398,7 +399,7 @@ public class TestSplitTransactionOnCluster { Bytes.toBytes("testRSSplitEphemeralsDisappearButDaughtersAreOnlinedAfterShutdownHandling"); // Create table then get the single region for our new table. - HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); + HTableInterface t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); @@ -476,7 +477,7 @@ public class TestSplitTransactionOnCluster { Bytes.toBytes("testExistingZnodeBlocksSplitAndWeRollback"); // Create table then get the single region for our new table. 
- HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); + HTableInterface t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); @@ -546,7 +547,7 @@ public class TestSplitTransactionOnCluster { Bytes.toBytes("testShutdownFixupWhenDaughterHasSplit"); // Create table then get the single region for our new table. - HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); + HTableInterface t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); @@ -627,7 +628,7 @@ public class TestSplitTransactionOnCluster { HColumnDescriptor hcd = new HColumnDescriptor("col"); htd.addFamily(hcd); admin.createTable(htd); - HTable table = new HTable(conf, userTableName); + HTableInterface table = new HTable(conf, userTableName); try { for (int i = 0; i <= 5; i++) { String row = "row" + i; @@ -719,7 +720,7 @@ public class TestSplitTransactionOnCluster { } // Create table then get the single region for our new table. - HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); + HTableInterface t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); @@ -809,7 +810,7 @@ public class TestSplitTransactionOnCluster { final byte[] tableName = Bytes.toBytes("testMasterRestartAtRegionSplitPendingCatalogJanitor"); // Create table then get the single region for our new table. 
- HTable t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); + HTableInterface t = createTableAndWait(tableName, HConstants.CATALOG_FAMILY); List regions = cluster.getRegions(tableName); HRegionInfo hri = getAndCheckSingleTableRegion(regions); @@ -889,7 +890,7 @@ public class TestSplitTransactionOnCluster { final TableName tableName = TableName.valueOf("testTableExistsIfTheSpecifiedTableRegionIsSplitParent"); // Create table then get the single region for our new table. - HTable t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); + HTableInterface t = createTableAndWait(tableName.getName(), Bytes.toBytes("cf")); List regions = null; try { regions = cluster.getRegions(tableName); @@ -936,7 +937,7 @@ public class TestSplitTransactionOnCluster { htd.setRegionReplication(2); htd.addCoprocessor(SlowMeCopro.class.getName()); // Create table then get the single region for our new table. - HTable t = TESTING_UTIL.createTable(htd, new byte[][]{Bytes.toBytes("cf")}, + HTableInterface t = TESTING_UTIL.createTable(htd, new byte[][]{Bytes.toBytes("cf")}, TESTING_UTIL.getConfiguration()); int count; List oldRegions; @@ -1008,8 +1009,8 @@ public class TestSplitTransactionOnCluster { } } - private void insertData(final byte[] tableName, HBaseAdmin admin, HTable t) throws IOException, - InterruptedException { + private void insertData(final byte[] tableName, HBaseAdmin admin, HTableInterface t) + throws IOException, InterruptedException { Put p = new Put(Bytes.toBytes("row1")); p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("1")); t.put(p); @@ -1128,8 +1129,8 @@ public class TestSplitTransactionOnCluster { cluster.getServerHoldingRegion(firstTableregions.get(0).getRegionName()); admin.move(secondTableRegions.get(0).getRegionInfo().getEncodedNameAsBytes(), Bytes.toBytes(serverName.getServerName())); - HTable table1 = null; - HTable table2 = null; + HTableInterface table1 = null; + HTableInterface table2 = null; try { table1 = new 
HTable(TESTING_UTIL.getConfiguration(), firstTable); table2 = new HTable(TESTING_UTIL.getConfiguration(), firstTable); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index d259933..6595d7c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; @@ -97,7 +98,7 @@ public class TestTags { @Test public void testTags() throws Exception { - HTable table = null; + HTableInterface table = null; try { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); byte[] fam = Bytes.toBytes("info"); @@ -175,7 +176,7 @@ public class TestTags { @Test public void testFlushAndCompactionWithoutTags() throws Exception { - HTable table = null; + HTableInterface table = null; try { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); byte[] fam = Bytes.toBytes("info"); @@ -289,7 +290,7 @@ public class TestTags { byte[] row2 = Bytes.toBytes("rowc"); byte[] rowd = Bytes.toBytes("rowd"); byte[] rowe = Bytes.toBytes("rowe"); - HTable table = null; + HTableInterface table = null; for (DataBlockEncoding encoding : DataBlockEncoding.values()) { HTableDescriptor desc = new HTableDescriptor(tableName); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -421,7 +422,7 @@ public class TestTags { desc.addFamily(colDesc); TEST_UTIL.getHBaseAdmin().createTable(desc); - HTable table = null; + HTableInterface table = null; try { table = new 
HTable(TEST_UTIL.getConfiguration(), tableName); Put put = new Put(row1); @@ -542,8 +543,8 @@ public class TestTags { } } - private void result(byte[] fam, byte[] row, byte[] qual, byte[] row2, HTable table, byte[] value, - byte[] value2, byte[] row1, byte[] value1) throws IOException { + private void result(byte[] fam, byte[] row, byte[] qual, byte[] row2, HTableInterface table, + byte[] value, byte[] value2, byte[] row1, byte[] value1) throws IOException { Scan s = new Scan(row); // If filters are used this attribute can be specifically check for in // filterKV method and diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java index cc8c531..bbd60a7 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.protobuf.RequestConverter; @@ -72,7 +73,7 @@ public class TestHLogFiltering { } private void fillTable() throws IOException, InterruptedException { - HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES, 3, + HTableInterface table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES, 3, Bytes.toBytes("row0"), Bytes.toBytes("row99"), NUM_RS); Random rand = new Random(19387129L); for (int iStoreFile = 0; iStoreFile < 4; ++iStoreFile) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java index 
e5e62aa..3643bef 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java @@ -21,6 +21,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.util.concurrent.atomic.AtomicLong; +import org.apache.hadoop.hbase.client.HTableInterface; import org.junit.Assert; import static org.junit.Assert.assertTrue; @@ -131,7 +132,7 @@ public class TestLogRollAbort { desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); admin.createTable(desc); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); try { HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java index 1c26726..6705a48 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.wal.HLog; @@ -79,7 +80,7 @@ public class TestLogRollPeriod { TEST_UTIL.createTable(tableName, "cf"); try { - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); try { HRegionServer server = 
TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); HLog log = server.getWAL(); @@ -104,7 +105,7 @@ public class TestLogRollPeriod { try { HRegionServer server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); HLog log = server.getWAL(); - final HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + final HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); Thread writerThread = new Thread("writer") { @Override diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java index a47c826..a655f4e 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -176,7 +177,7 @@ public class TestLogRolling { this.server = cluster.getRegionServerThreads().get(0).getRegionServer(); this.log = server.getWAL(); - HTable table = createTestTable(this.tableName); + HTableInterface table = createTestTable(this.tableName); server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); this.log = server.getWAL(); @@ -243,7 +244,7 @@ public class TestLogRolling { return "TestLogRolling"; } - void writeData(HTable table, int rownum) throws IOException { + void writeData(HTableInterface table, int rownum) throws IOException { doPut(table, rownum); // sleep to let the log roller run (if it needs to) @@ -254,7 +255,7 @@ public class TestLogRolling { } } - void 
validateData(HTable table, int rownum) throws IOException { + void validateData(HTableInterface table, int rownum) throws IOException { String row = "row" + String.format("%1$04d", rownum); Get get = new Get(Bytes.toBytes(row)); get.addFamily(HConstants.CATALOG_FAMILY); @@ -265,7 +266,7 @@ public class TestLogRolling { LOG.info("Validated row " + row); } - void batchWriteAndWait(HTable table, int start, boolean expect, int timeout) + void batchWriteAndWait(HTableInterface table, int start, boolean expect, int timeout) throws IOException { for (int i = 0; i < 10; i++) { Put put = new Put(Bytes.toBytes("row" @@ -336,7 +337,7 @@ public class TestLogRolling { desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); admin.createTable(desc); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); assertTrue(table.isAutoFlush()); server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); @@ -433,7 +434,7 @@ public class TestLogRolling { LOG.info("Replication=" + fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS())); // When the hbase:meta table can be opened, the region servers are running - HTable t = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); try { this.server = cluster.getRegionServer(0); this.log = server.getWAL(); @@ -444,7 +445,7 @@ public class TestLogRolling { desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); admin.createTable(desc); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); server = TEST_UTIL.getRSForFirstRegionInTable(Bytes.toBytes(tableName)); this.log = server.getWAL(); @@ -592,11 +593,11 @@ public class TestLogRolling { */ @Test public void testCompactionRecordDoesntBlockRolling() throws Exception { - 
HTable table = null; - HTable table2 = null; + HTableInterface table = null; + HTableInterface table2 = null; // When the hbase:meta table can be opened, the region servers are running - HTable t = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); + HTableInterface t = new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME); try { String tableName = getName(); table = createTestTable(tableName); @@ -650,13 +651,13 @@ public class TestLogRolling { } } - private void doPut(HTable table, int i) throws IOException { + private void doPut(HTableInterface table, int i) throws IOException { Put put = new Put(Bytes.toBytes("row" + String.format("%1$04d", i))); put.add(HConstants.CATALOG_FAMILY, null, value); table.put(put); } - private HTable createTestTable(String tableName) throws IOException { + private HTableInterface createTestTable(String tableName) throws IOException { // Create the test table and open it HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java index ea368df..67ef531 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -165,7 +166,7 @@ public class TestWALReplay { byte[] value = Bytes.toBytes("testV"); byte[][] 
familys = { family1, family2 }; TEST_UTIL.createTable(tableName, familys); - HTable htable = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface htable = new HTable(TEST_UTIL.getConfiguration(), tableName); Put put = new Put(Bytes.toBytes("r1")); put.add(family1, qualifier, value); htable.put(put); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java index 72a1513..a8744f2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; @@ -123,7 +124,7 @@ public class TestMasterReplication { public void testCyclicReplication1() throws Exception { LOG.info("testSimplePutDelete"); int numClusters = 2; - HTable[] htables = null; + HTableInterface[] htables = null; try { startMiniClusters(numClusters); createTableOnClusters(table); @@ -163,7 +164,7 @@ public class TestMasterReplication { public void testCyclicReplication2() throws Exception { LOG.info("testCyclicReplication1"); int numClusters = 3; - HTable[] htables = null; + HTableInterface[] htables = null; try { startMiniClusters(numClusters); createTableOnClusters(table); @@ -214,7 +215,7 @@ public class TestMasterReplication { public void testCyclicReplication3() throws Exception { LOG.info("testCyclicReplication2"); int numClusters = 3; - HTable[] htables = null; + HTableInterface[] htables = null; try { 
startMiniClusters(numClusters); createTableOnClusters(table); @@ -354,7 +355,7 @@ public class TestMasterReplication { return htables; } - private void validateCounts(HTable[] htables, byte[] type, + private void validateCounts(HTableInterface[] htables, byte[] type, int[] expectedCounts) throws IOException { for (int i = 0; i < htables.length; i++) { assertEquals(Bytes.toString(type) + " were replicated back ", @@ -362,21 +363,21 @@ public class TestMasterReplication { } } - private int getCount(HTable t, byte[] type) throws IOException { + private int getCount(HTableInterface t, byte[] type) throws IOException { Get test = new Get(row); test.setAttribute("count", new byte[] {}); Result res = t.get(test); return Bytes.toInt(res.getValue(count, type)); } - private void deleteAndWait(byte[] row, HTable source, HTable target) + private void deleteAndWait(byte[] row, HTableInterface source, HTableInterface target) throws Exception { Delete del = new Delete(row); source.delete(del); wait(row, target, true); } - private void putAndWait(byte[] row, byte[] fam, HTable source, HTable target) + private void putAndWait(byte[] row, byte[] fam, HTableInterface source, HTableInterface target) throws Exception { Put put = new Put(row); put.add(fam, row, row); @@ -384,7 +385,7 @@ public class TestMasterReplication { wait(row, target, false); } - private void wait(byte[] row, HTable target, boolean isDeleted) + private void wait(byte[] row, HTableInterface target, boolean isDeleted) throws Exception { Get get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java index f3daa97..aac403d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java 
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; @@ -124,11 +125,11 @@ public class TestMultiSlaveReplication { new HBaseAdmin(conf1).createTable(table); new HBaseAdmin(conf2).createTable(table); new HBaseAdmin(conf3).createTable(table); - HTable htable1 = new HTable(conf1, tableName); + HTableInterface htable1 = new HTable(conf1, tableName); htable1.setWriteBufferSize(1024); - HTable htable2 = new HTable(conf2, tableName); + HTableInterface htable2 = new HTable(conf2, tableName); htable2.setWriteBufferSize(1024); - HTable htable3 = new HTable(conf3, tableName); + HTableInterface htable3 = new HTable(conf3, tableName); htable3.setWriteBufferSize(1024); admin1.addPeer("1", utility2.getClusterKey()); @@ -187,7 +188,7 @@ public class TestMultiSlaveReplication { utility1.shutdownMiniCluster(); } - private void checkWithWait(byte[] row, int count, HTable table) throws Exception { + private void checkWithWait(byte[] row, int count, HTableInterface table) throws Exception { Get get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { if (i == NB_RETRIES - 1) { @@ -209,15 +210,15 @@ public class TestMultiSlaveReplication { } } - private void checkRow(byte[] row, int count, HTable... tables) throws IOException { + private void checkRow(byte[] row, int count, HTableInterface... tables) throws IOException { Get get = new Get(row); - for (HTable table : tables) { + for (HTableInterface table : tables) { Result res = table.get(get); assertEquals(count, res.size()); } } - private void deleteAndWait(byte[] row, HTable source, HTable... 
targets) + private void deleteAndWait(byte[] row, HTableInterface source, HTableInterface... targets) throws Exception { Delete del = new Delete(row); source.delete(del); @@ -228,7 +229,7 @@ public class TestMultiSlaveReplication { fail("Waited too much time for del replication"); } boolean removedFromAll = true; - for (HTable target : targets) { + for (HTableInterface target : targets) { Result res = target.get(get); if (res.size() >= 1) { LOG.info("Row not deleted"); @@ -244,11 +245,11 @@ public class TestMultiSlaveReplication { } } - private void putAndWait(byte[] row, byte[] fam, HTable source, HTable... targets) + private void putAndWait(byte[] row, byte[] fam, HTableInterface src, HTableInterface... targets) throws Exception { Put put = new Put(row); put.add(fam, row, row); - source.put(put); + src.put(put); Get get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { @@ -256,7 +257,7 @@ public class TestMultiSlaveReplication { fail("Waited too much time for put replication"); } boolean replicatedToAll = true; - for (HTable target : targets) { + for (HTableInterface target : targets) { Result res = target.get(get); if (res.size() == 0) { LOG.info("Row not available"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java index ff77a94..76e2aae 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import 
org.apache.hadoop.hbase.client.replication.ReplicationAdmin; @@ -272,17 +273,17 @@ public class TestPerTableCFReplication { new HBaseAdmin(conf3).createTable(tabB); new HBaseAdmin(conf3).createTable(tabC); - HTable htab1A = new HTable(conf1, tabAName); - HTable htab2A = new HTable(conf2, tabAName); - HTable htab3A = new HTable(conf3, tabAName); + HTableInterface htab1A = new HTable(conf1, tabAName); + HTableInterface htab2A = new HTable(conf2, tabAName); + HTableInterface htab3A = new HTable(conf3, tabAName); - HTable htab1B = new HTable(conf1, tabBName); - HTable htab2B = new HTable(conf2, tabBName); - HTable htab3B = new HTable(conf3, tabBName); + HTableInterface htab1B = new HTable(conf1, tabBName); + HTableInterface htab2B = new HTable(conf2, tabBName); + HTableInterface htab3B = new HTable(conf3, tabBName); - HTable htab1C = new HTable(conf1, tabCName); - HTable htab2C = new HTable(conf2, tabCName); - HTable htab3C = new HTable(conf3, tabCName); + HTableInterface htab1C = new HTable(conf1, tabCName); + HTableInterface htab2C = new HTable(conf2, tabCName); + HTableInterface htab3C = new HTable(conf3, tabCName); // A. add cluster2/cluster3 as peers to cluster1 admin1.addPeer("2", utility2.getClusterKey(), "TC;TB:f1,f3"); @@ -371,18 +372,18 @@ public class TestPerTableCFReplication { deleteAndWaitWithFamily(row2, f3Name, htab1C, htab2C, htab3C); } - private void ensureRowNotReplicated(byte[] row, byte[] fam, HTable... tables) throws IOException { + private void ensureRowNotReplicated(byte[] row, byte[] fam, HTableInterface... tables) + throws IOException { Get get = new Get(row); get.addFamily(fam); - for (HTable table : tables) { + for (HTableInterface table : tables) { Result res = table.get(get); assertEquals(0, res.size()); } } - private void deleteAndWaitWithFamily(byte[] row, byte[] fam, - HTable source, HTable... targets) - throws Exception { + private void deleteAndWaitWithFamily(byte[] row, byte[] fam, HTableInterface source, + HTableInterface... 
targets) throws Exception { Delete del = new Delete(row); del.deleteFamily(fam); source.delete(del); @@ -394,7 +395,7 @@ public class TestPerTableCFReplication { fail("Waited too much time for del replication"); } boolean removedFromAll = true; - for (HTable target : targets) { + for (HTableInterface target : targets) { Result res = target.get(get); if (res.size() >= 1) { LOG.info("Row not deleted"); @@ -411,7 +412,7 @@ public class TestPerTableCFReplication { } private void putAndWaitWithFamily(byte[] row, byte[] fam, - HTable source, HTable... targets) + HTableInterface source, HTableInterface... targets) throws Exception { Put put = new Put(row); put.add(fam, row, val); @@ -424,7 +425,7 @@ public class TestPerTableCFReplication { fail("Waited too much time for put replication"); } boolean replicatedToAll = true; - for (HTable target : targets) { + for (HTableInterface target : targets) { Result res = target.get(get); if (res.size() == 0) { LOG.info("Row not available"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java index 080d858..bf5c947 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.replication.regionserver.ReplicationSource; import org.apache.hadoop.hbase.util.Bytes; @@ -62,8 +63,8 @@ public class TestReplicationBase { protected static ReplicationAdmin admin; - protected static HTable htable1; - protected static HTable htable2; 
+ protected static HTableInterface htable1; + protected static HTableInterface htable2; protected static HBaseTestingUtility utility1; protected static HBaseTestingUtility utility2; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java index e8620d4..a99554a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.util.Bytes; @@ -52,7 +53,7 @@ public class TestReplicationSyncUpTool extends TestReplicationBase { private HTableDescriptor t1_syncupSource, t1_syncupTarget; private HTableDescriptor t2_syncupSource, t2_syncupTarget; - private HTable ht1Source, ht2Source, ht1TargetAtPeer1, ht2TargetAtPeer1; + private HTableInterface ht1Source, ht2Source, ht1TargetAtPeer1, ht2TargetAtPeer1; @Before public void setUp() throws Exception { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java index 491a9db..900d1d8 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; 
import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; @@ -70,8 +71,8 @@ public class TestReplicationWithTags { private static ReplicationAdmin replicationAdmin; - private static HTable htable1; - private static HTable htable2; + private static HTableInterface htable1; + private static HTableInterface htable2; private static HBaseTestingUtility utility1; private static HBaseTestingUtility utility2; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java index 3a138e4..231ee19 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -69,7 +70,7 @@ public class TestReplicationSink { private static final byte[] FAM_NAME1 = Bytes.toBytes("info1"); private static final byte[] FAM_NAME2 = Bytes.toBytes("info2"); - private static HTable table1; + private static HTableInterface table1; private static Stoppable STOPPABLE = new Stoppable() { final AtomicBoolean stop = new AtomicBoolean(false); @@ -86,7 +87,7 @@ public class TestReplicationSink { }; - private static HTable table2; + private static HTableInterface table2; /** * @throws java.lang.Exception diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java 
hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java index 8f8e62e..ee85cb3 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Cluster; @@ -102,7 +103,7 @@ public class TestGzipFilter { Response response = client.put(path, headers, value_1_gzip); assertEquals(response.getCode(), 200); - HTable table = new HTable(TEST_UTIL.getConfiguration(), TABLE); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), TABLE); Get get = new Get(Bytes.toBytes(ROW_1)); get.addColumn(Bytes.toBytes(CFA), Bytes.toBytes("1")); Result result = table.get(get); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java index a13280e..cf8120d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.rest.client.Client; @@ -78,7 +79,7 @@ public class TestScannerResource { throws IOException { Random rng = new Random(); int count = 0; - HTable table = new 
HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); byte[] k = new byte[3]; byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column)); for (byte b1 = 'a'; b1 < 'z'; b1++) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java index 4634335..cb085f2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Durability; @@ -135,7 +136,7 @@ public class TestScannersWithFilters { htd.addFamily(new HColumnDescriptor(FAMILIES[0])); htd.addFamily(new HColumnDescriptor(FAMILIES[1])); admin.createTable(htd); - HTable table = new HTable(TEST_UTIL.getConfiguration(), TABLE); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), TABLE); // Insert first half for(byte [] ROW : ROWS_ONE) { Put p = new Put(ROW); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java index edf31b8..a449d0c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import 
org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; import org.apache.hadoop.hbase.rest.client.Client; @@ -90,7 +91,7 @@ public class TestScannersWithLabels { private static int insertData(TableName tableName, String column, double prob) throws IOException { Random rng = new Random(); int count = 0; - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); byte[] k = new byte[3]; byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column)); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java index 120604e..48baf37 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -99,7 +100,7 @@ public class TestRemoteTable { htd.addFamily(new HColumnDescriptor(COLUMN_2).setMaxVersions(3)); htd.addFamily(new HColumnDescriptor(COLUMN_3).setMaxVersions(3)); admin.createTable(htd); - HTable table = null; + HTableInterface table = null; try { table = new HTable(TEST_UTIL.getConfiguration(), TABLE); Put put = new Put(ROW_1); @@ -133,7 +134,7 @@ public class TestRemoteTable { @Test public void testGetTableDescriptor() throws IOException { - HTable table = null; + HTableInterface table = null; try { table = 
new HTable(TEST_UTIL.getConfiguration(), TABLE); HTableDescriptor local = table.getTableDescriptor(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index ca27807..a01113b 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter.Predicate; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.io.hfile.HFile; @@ -117,7 +118,7 @@ public class SecureTestUtil { for (Permission p : perms) { request.addPermission(ProtobufUtil.toPermission(p)); } - HTable acl = new HTable(conf, table); + HTableInterface acl = new HTable(conf, table); try { AccessControlService.BlockingInterface protocol = AccessControlService.newBlockingStub(acl.coprocessorService(new byte[0])); @@ -327,7 +328,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -351,7 +352,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + 
HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -375,7 +376,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -399,7 +400,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -424,7 +425,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -449,7 +450,7 @@ public class SecureTestUtil { SecureTestUtil.updateACLs(util, new Callable() { @Override public Void call() throws Exception { - HTable acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(util.getConfiguration(), AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = 
acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java index 427d62d..71f3e27 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -90,7 +91,7 @@ public class TestAccessControlFilter extends SecureTestUtil { @Test public void testQualifierAccess() throws Exception { - final HTable table = TEST_UTIL.createTable(TABLE, FAMILY); + final HTableInterface table = TEST_UTIL.createTable(TABLE, FAMILY); try { doQualifierAccess(table); } finally { @@ -98,7 +99,7 @@ public class TestAccessControlFilter extends SecureTestUtil { } } - private void doQualifierAccess(final HTable table) throws Exception { + private void doQualifierAccess(final HTableInterface table) throws Exception { // set permissions SecureTestUtil.grantOnTable(TEST_UTIL, READER.getShortName(), TABLE, null, null, Permission.Action.READ); @@ -121,7 +122,7 @@ public class TestAccessControlFilter extends SecureTestUtil { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TABLE); + HTableInterface t = new HTable(conf, TABLE); try { ResultScanner rs = t.getScanner(new Scan()); int rowcnt = 0; @@ 
-147,7 +148,7 @@ public class TestAccessControlFilter extends SecureTestUtil { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TABLE); + HTableInterface t = new HTable(conf, TABLE); try { ResultScanner rs = t.getScanner(new Scan()); int rowcnt = 0; @@ -172,7 +173,7 @@ public class TestAccessControlFilter extends SecureTestUtil { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TABLE); + HTableInterface t = new HTable(conf, TABLE); try { ResultScanner rs = t.getScanner(new Scan()); int rowcnt = 0; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 6458a55..f37b0d2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -691,7 +692,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Get g = new Get(TEST_ROW); g.addFamily(TEST_FAMILY); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.get(g); } finally { @@ -709,7 +710,7 @@ public class TestAccessController extends 
SecureTestUtil { Scan s = new Scan(); s.addFamily(TEST_FAMILY); - HTable table = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface table = new HTable(conf, TEST_TABLE.getTableName()); try { ResultScanner scanner = table.getScanner(s); try { @@ -738,7 +739,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Put p = new Put(TEST_ROW); p.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1)); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.put(p); } finally { @@ -755,7 +756,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Delete d = new Delete(TEST_ROW); d.deleteFamily(TEST_FAMILY); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.delete(d); } finally { @@ -772,7 +773,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Increment inc = new Increment(TEST_ROW); inc.addColumn(TEST_FAMILY, TEST_QUALIFIER, 1); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.increment(inc); } finally { @@ -792,7 +793,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Delete d = new Delete(TEST_ROW); d.deleteFamily(TEST_FAMILY); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.checkAndDelete(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("test_value"), d); @@ -810,7 +811,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Put p = new Put(TEST_ROW); p.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1)); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new 
HTable(conf, TEST_TABLE.getTableName()); try { t.checkAndPut(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("test_value"), p); @@ -948,7 +949,7 @@ public class TestAccessController extends SecureTestUtil { put.add(TEST_FAMILY, qualifier, Bytes.toBytes(1)); Append append = new Append(row); append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2)); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.put(put); t.append(append); @@ -968,7 +969,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction grantAction = new AccessTestAction() { @Override public Object run() throws Exception { - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(TEST_TABLE.getTableName().getName()); AccessControlService.BlockingInterface protocol = @@ -985,7 +986,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction revokeAction = new AccessTestAction() { @Override public Object run() throws Exception { - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(TEST_TABLE.getTableName().getName()); AccessControlService.BlockingInterface protocol = @@ -1002,7 +1003,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction getPermissionsAction = new AccessTestAction() { @Override public Object run() throws Exception { - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(TEST_TABLE.getTableName().getName()); AccessControlService.BlockingInterface protocol = @@ -1057,7 +1058,7 @@ public class 
TestAccessController extends SecureTestUtil { Put p = new Put(Bytes.toBytes("a")); p.add(family1, qualifier, Bytes.toBytes("v1")); p.add(family2, qualifier, Bytes.toBytes("v2")); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.put(p); } finally { @@ -1072,7 +1073,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Put p = new Put(Bytes.toBytes("a")); p.add(family1, qualifier, Bytes.toBytes("v1")); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.put(p); } finally { @@ -1087,7 +1088,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Put p = new Put(Bytes.toBytes("a")); p.add(family2, qualifier, Bytes.toBytes("v2")); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.put(p); } finally { @@ -1103,7 +1104,7 @@ public class TestAccessController extends SecureTestUtil { Get g = new Get(TEST_ROW); g.addFamily(family1); g.addFamily(family2); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.get(g); } finally { @@ -1118,7 +1119,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Get g = new Get(TEST_ROW); g.addFamily(family1); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.get(g); } finally { @@ -1133,7 +1134,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Get g = new Get(TEST_ROW); g.addFamily(family2); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.get(g); } finally { @@ -1149,7 +1150,7 @@ public class TestAccessController extends SecureTestUtil { Delete d = new Delete(TEST_ROW); d.deleteFamily(family1); d.deleteFamily(family2); - HTable t = new HTable(conf, 
tableName); + HTableInterface t = new HTable(conf, tableName); try { t.delete(d); } finally { @@ -1164,7 +1165,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Delete d = new Delete(TEST_ROW); d.deleteFamily(family1); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.delete(d); } finally { @@ -1179,7 +1180,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Delete d = new Delete(TEST_ROW); d.deleteFamily(family2); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.delete(d); } finally { @@ -1326,7 +1327,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Get g = new Get(TEST_ROW); g.addColumn(family1, qualifier); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.get(g); } finally { @@ -1341,7 +1342,7 @@ public class TestAccessController extends SecureTestUtil { public Object run() throws Exception { Put p = new Put(TEST_ROW); p.add(family1, qualifier, Bytes.toBytes("v1")); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.put(p); } finally { @@ -1357,7 +1358,7 @@ public class TestAccessController extends SecureTestUtil { Delete d = new Delete(TEST_ROW); d.deleteColumn(family1, qualifier); // d.deleteFamily(family1); - HTable t = new HTable(conf, tableName); + HTableInterface t = new HTable(conf, tableName); try { t.delete(d); } finally { @@ -1435,7 +1436,7 @@ public class TestAccessController extends SecureTestUtil { List perms; - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(tableName.getName()); AccessControlService.BlockingInterface protocol = @@ -1548,7 
+1549,7 @@ public class TestAccessController extends SecureTestUtil { @Test public void testGlobalPermissionList() throws Exception { List perms; - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); AccessControlService.BlockingInterface protocol = @@ -1583,7 +1584,7 @@ public class TestAccessController extends SecureTestUtil { AccessControlProtos.GlobalPermission.newBuilder() .addAction(ProtobufUtil.toPermissionAction(a)).build())); } - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel channel = acl.coprocessorService(new byte[0]); AccessControlService.BlockingInterface protocol = @@ -1613,7 +1614,7 @@ public class TestAccessController extends SecureTestUtil { for (Permission p : perms) { request.addPermission(ProtobufUtil.toPermission(p)); } - HTable acl = new HTable(conf, table); + HTableInterface acl = new HTable(conf, table); try { AccessControlService.BlockingInterface protocol = AccessControlService.newBlockingStub(acl.coprocessorService(new byte[0])); @@ -1763,7 +1764,7 @@ public class TestAccessController extends SecureTestUtil { .setTableName(ProtobufUtil.toProtoTableName(TEST_TABLE.getTableName())) .addAction(AccessControlProtos.Permission.Action.CREATE)) ).build(); - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel channel = acl.coprocessorService(new byte[0]); AccessControlService.BlockingInterface protocol = @@ -2022,7 +2023,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction getAction = new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, 
TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(new Get(TEST_ROW)); } finally { @@ -2112,7 +2113,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction execEndpointAction = new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { BlockingRpcChannel service = t.coprocessorService(HConstants.EMPTY_BYTE_ARRAY); PingCoprocessor.newBlockingStub(service).noop(null, NoopRequest.newBuilder().build()); @@ -2141,7 +2142,7 @@ public class TestAccessController extends SecureTestUtil { AccessTestAction putWithReservedTag = new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { KeyValue kv = new KeyValue(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER, HConstants.LATEST_TIMESTAMP, HConstants.EMPTY_BYTE_ARRAY, diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java index e3ea397..55b8d67 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; @@ -145,7 +146,7 @@ public class 
TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; // with ro ACL @@ -179,7 +180,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { public Object run() throws Exception { Get get = new Get(TEST_ROW); get.setMaxVersions(10); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -193,7 +194,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { public Object run() throws Exception { Get get = new Get(TEST_ROW); get.setMaxVersions(10); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -210,7 +211,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); @@ -249,7 +250,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { // with rw ACL for "user1" Put p = new Put(TEST_ROW1); @@ -275,7 +276,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface 
t = new HTable(conf, TEST_TABLE.getTableName()); try { // with rw ACL for "user1" and "user2" Put p = new Put(TEST_ROW1); @@ -306,7 +307,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1); @@ -323,7 +324,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW2); d.deleteColumns(TEST_FAMILY1, TEST_Q1); @@ -342,7 +343,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW2); d.deleteFamily(TEST_FAMILY1); @@ -363,7 +364,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { // Store read only ACL at a future time Put p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, @@ -389,7 +390,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, 
TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -402,7 +403,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q2); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -422,7 +423,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { @Override public Object run() throws Exception { Delete delete = new Delete(TEST_ROW).deleteFamily(TEST_FAMILY1); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.delete(delete); } finally { @@ -448,7 +449,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { USER_OWNER.runAs(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { // This version (TS = 123) with rw ACL for USER_OTHER and USER_OTHER2 Put p = new Put(TEST_ROW); @@ -493,7 +494,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { USER_OTHER2.runAs(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW, 124L); d.deleteColumns(TEST_FAMILY1, TEST_Q1); @@ -509,7 +510,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { USER_OTHER2.runAs(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW); d.deleteColumns(TEST_FAMILY1, 
TEST_Q2, 124L); @@ -535,7 +536,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Map permsU1andOwner = new HashMap(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, @@ -592,7 +593,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumn(TEST_FAMILY1, TEST_Q1, 123); @@ -609,7 +610,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1, 127); d.deleteColumns(TEST_FAMILY1, TEST_Q1); @@ -640,7 +641,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Map permsU1andOwner = new HashMap(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, @@ -680,7 +681,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Increment inc = new 
Increment(TEST_ROW1); inc.setTimeRange(0, 123); @@ -697,7 +698,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Increment inc = new Increment(TEST_ROW1); inc.setTimeRange(0, 127); @@ -727,7 +728,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Map permsU1andOwner = new HashMap(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, @@ -769,7 +770,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p = new Put(TEST_ROW1); p.add(TEST_FAMILY1, TEST_Q1, 125, ZERO); @@ -788,7 +789,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p = new Put(TEST_ROW1); // column Q1 covers version at 123 fr which user2 do not have permission @@ -817,7 +818,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Map permsU1andOwner = new 
HashMap(); permsU1andOwner.put(user1.getShortName(), new Permission(Permission.Action.READ, @@ -870,7 +871,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user1.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1, 120); @@ -886,7 +887,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumns(TEST_FAMILY1, TEST_Q1); @@ -905,7 +906,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { user2.runAs(new PrivilegedExceptionAction() { @Override public Void run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Delete d = new Delete(TEST_ROW1); d.deleteColumn(TEST_FAMILY1, TEST_Q2, 120); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java index 555986c..dc07079 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import 
org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -141,7 +142,7 @@ public class TestCellACLs extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; // with ro ACL @@ -170,7 +171,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -183,7 +184,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q2); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -196,7 +197,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q3); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -209,7 +210,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Get get = new Get(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q4); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(get).listCells(); } finally { @@ -241,7 +242,7 @@ public class TestCellACLs extends SecureTestUtil { scan.setStartRow(TEST_ROW); scan.setStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 } )); scan.addFamily(TEST_FAMILY); - 
HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { ResultScanner scanner = t.getScanner(scan); Result result = null; @@ -274,7 +275,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Increment i = new Increment(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1, 1L); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.increment(i); } finally { @@ -288,7 +289,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Increment i = new Increment(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q2, 1L); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.increment(i); } finally { @@ -304,7 +305,7 @@ public class TestCellACLs extends SecureTestUtil { Increment i = new Increment(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q2, 1L); // Tag this increment with an ACL that denies write permissions to USER_OTHER i.setACL(USER_OTHER.getShortName(), new Permission(Action.READ)); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.increment(i); } finally { @@ -318,7 +319,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Increment i = new Increment(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q3, 1L); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.increment(i); } finally { @@ -345,7 +346,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Delete delete = new Delete(TEST_ROW).deleteFamily(TEST_FAMILY); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, 
TEST_TABLE.getTableName()); try { t.delete(delete); } finally { @@ -359,7 +360,7 @@ public class TestCellACLs extends SecureTestUtil { @Override public Object run() throws Exception { Delete delete = new Delete(TEST_ROW).deleteColumn(TEST_FAMILY, TEST_Q1); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { t.delete(delete); } finally { @@ -390,7 +391,7 @@ public class TestCellACLs extends SecureTestUtil { verifyDenied(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ZERO); @@ -406,7 +407,7 @@ public class TestCellACLs extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ZERO); @@ -422,7 +423,7 @@ public class TestCellACLs extends SecureTestUtil { verifyDenied(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put p; p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ONE); @@ -438,7 +439,7 @@ public class TestCellACLs extends SecureTestUtil { verifyAllowed(new AccessTestAction() { @Override public Object run() throws Exception { - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { return t.get(new Get(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1)); } finally { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java 
hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java index 7525ff9..10e512d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.ObserverContext; @@ -96,7 +97,7 @@ public class TestNamespaceCommands extends SecureTestUtil { @Test public void testAclTableEntries() throws Exception { String userTestNamespace = "userTestNsp"; - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { // Grant and check state in ACL table grantOnNamespace(UTIL, userTestNamespace, TestNamespace, @@ -151,7 +152,7 @@ public class TestNamespaceCommands extends SecureTestUtil { AccessTestAction grantAction = new AccessTestAction() { public Object run() throws Exception { - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); @@ -167,7 +168,7 @@ public class TestNamespaceCommands extends SecureTestUtil { AccessTestAction revokeAction = new AccessTestAction() { public Object run() throws Exception { - HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); + HTableInterface acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME); try { BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW); 
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java index 0a1397b..7910d18 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -155,7 +156,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Put put = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO); t.put(put); @@ -180,7 +181,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan().addFamily(TEST_FAMILY1); Result result = t.getScanner(scan).next(); @@ -204,7 +205,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); 
try { Scan scan = new Scan(); Result result = t.getScanner(scan).next(); @@ -226,7 +227,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan().addFamily(TEST_FAMILY2); Result result = t.getScanner(scan).next(); @@ -252,7 +253,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { public Object run() throws Exception { // force a new RS connection conf.set("testkey", UUID.randomUUID().toString()); - HTable t = new HTable(conf, TEST_TABLE.getTableName()); + HTableInterface t = new HTable(conf, TEST_TABLE.getTableName()); try { Scan scan = new Scan(); Result result = t.getScanner(scan).next(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java index b81bf4f..7d34ad7 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java @@ -40,6 +40,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; @@ -288,7 +289,7 @@ public class TestTablePermissions { ListMultimap preperms = AccessControlLists.getTablePermissions(conf, TEST_TABLE); - HTable table = new HTable(conf, TEST_TABLE); + HTableInterface table = new HTable(conf, TEST_TABLE); table.put(new 
Put(Bytes.toBytes("row1")) .add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v1"))); table.put(new Put(Bytes.toBytes("row2")) diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java index d4cbf4d..85c703c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.security.User; @@ -97,7 +98,7 @@ public class TestEnforcingScanLabelGenerator { SUPERUSER.runAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = TEST_UTIL.createTable(tableName, CF); + HTableInterface table = TEST_UTIL.createTable(tableName, CF); try { Put put = new Put(ROW_1); put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value); @@ -119,7 +120,7 @@ public class TestEnforcingScanLabelGenerator { TESTUSER.runAs(new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = new HTable(conf, tableName); + HTableInterface table = new HTable(conf, tableName); try { // Test that we enforce the defined set Get get = new Get(ROW_1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java index 84ecf6a..a2b2c47 100644 --- 
hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -129,7 +130,7 @@ public class TestVisibilityLabels { @Test public void testSimpleVisibilityLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, + HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, PRIVATE + "|" + CONFIDENTIAL); try { Scan s = new Scan(); @@ -158,7 +159,7 @@ public class TestVisibilityLabels { @Test public void testVisibilityLabelsWithComplexLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + HTableInterface table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET, "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")"); @@ -193,7 +194,7 @@ public class TestVisibilityLabels { @Test public void testVisibilityLabelsThatDoesNotPassTheCriteria() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + HTableInterface table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE); try { Scan s = new Scan(); @@ -221,7 +222,7 @@ public class TestVisibilityLabels { @Test public void testVisibilityLabelsInScanThatDoesNotMatchAnyDefinedLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + HTableInterface table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE); try { Scan s = new Scan(); @@ -239,7 +240,7 @@ public class TestVisibilityLabels { @Test public void testVisibilityLabelsWithGet() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" 
+ PRIVATE, SECRET + "&" + CONFIDENTIAL + "&" + PRIVATE); try { Get get = new Get(row1); @@ -322,7 +323,7 @@ public class TestVisibilityLabels { } TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); t.join(); - HTable table = null; + HTableInterface table = null; try { table = new HTable(TEST_UTIL.getConfiguration(), tableName); Scan s = new Scan(); @@ -340,7 +341,7 @@ public class TestVisibilityLabels { @Test(timeout = 60 * 1000) public void testVisibilityLabelsOnRSRestart() throws Exception { final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + HTableInterface table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE); List regionServerThreads = TEST_UTIL.getHBaseCluster() .getRegionServerThreads(); @@ -389,7 +390,7 @@ public class TestVisibilityLabels { // Scan the visibility label Scan s = new Scan(); s.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL)); - HTable ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + HTableInterface ht = new HTable(conf, LABELS_TABLE_NAME.getName()); int i = 0; try { ResultScanner scanner = ht.getScanner(s); @@ -434,7 +435,7 @@ public class TestVisibilityLabels { @Test public void testVisibilityLabelsInGetThatDoesNotMatchAnyDefinedLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + HTableInterface table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE); try { Get get = new Get(row1); @@ -489,7 +490,7 @@ public class TestVisibilityLabels { } }; SUPERUSER.runAs(action); - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(conf, LABELS_TABLE_NAME); Scan scan = new Scan(); @@ -587,7 +588,7 @@ public class TestVisibilityLabels { 
assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", resultList.get(1).getException().getName()); assertTrue(resultList.get(2).getException().getValue().isEmpty()); - HTable ht = null; + HTableInterface ht = null; try { ht = new HTable(conf, LABELS_TABLE_NAME); ResultScanner scanner = ht.getScanner(new Scan()); @@ -629,7 +630,7 @@ public class TestVisibilityLabels { @Test public void testLabelsWithCheckAndPut() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); byte[] row1 = Bytes.toBytes("row1"); @@ -661,7 +662,7 @@ public class TestVisibilityLabels { @Test public void testLabelsWithIncrement() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); byte[] row1 = Bytes.toBytes("row1"); @@ -693,7 +694,7 @@ public class TestVisibilityLabels { @Test public void testLabelsWithAppend() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); byte[] row1 = Bytes.toBytes("row1"); @@ -781,7 +782,7 @@ public class TestVisibilityLabels { col.setMaxVersions(5); desc.addFamily(col); TEST_UTIL.getHBaseAdmin().createTable(desc); - HTable table = null; + HTableInterface table = null; try { table = new HTable(TEST_UTIL.getConfiguration(), tableName); Put put = new Put(r1); @@ -871,7 +872,7 @@ public class TestVisibilityLabels { HColumnDescriptor col = new HColumnDescriptor(fam); desc.addFamily(col); TEST_UTIL.getHBaseAdmin().createTable(desc); - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); try { Put p1 = new Put(row1); p1.add(fam, qual, 
value); @@ -902,9 +903,9 @@ public class TestVisibilityLabels { } } - private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) + private static HTableInterface createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) throws Exception { - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); int i = 1; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java index 15b3136..1f48422 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -114,7 +115,7 @@ public class TestVisibilityLabelsWithACL { String user = "user2"; VisibilityClient.setAuths(conf, auths, user); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + PRIVATE, SECRET + "&!" 
+ PRIVATE); SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName, null, null, Permission.Action.READ); @@ -122,7 +123,7 @@ public class TestVisibilityLabelsWithACL { public Void run() throws Exception { Scan s = new Scan(); s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); - HTable t = new HTable(conf, table.getTableName()); + HTableInterface t = new HTable(conf, table.getTableName()); try { ResultScanner scanner = t.getScanner(s); Result result = scanner.next(); @@ -145,13 +146,13 @@ public class TestVisibilityLabelsWithACL { String user = "admin"; VisibilityClient.setAuths(conf, auths, user); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + PRIVATE, SECRET + "&!" + PRIVATE); PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() { public Void run() throws Exception { Scan s = new Scan(); s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); - HTable t = new HTable(conf, table.getTableName()); + HTableInterface t = new HTable(conf, table.getTableName()); try { ResultScanner scanner = t.getScanner(s); Result[] result = scanner.next(5); @@ -171,13 +172,13 @@ public class TestVisibilityLabelsWithACL { String user = "admin"; VisibilityClient.setAuths(conf, auths, user); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL - + "&!" + PRIVATE, SECRET + "&!" + PRIVATE); + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + + CONFIDENTIAL + "&!" + PRIVATE, SECRET + "&!" 
+ PRIVATE); PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() { public Void run() throws Exception { Get g = new Get(row1); g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); - HTable t = new HTable(conf, table.getTableName()); + HTableInterface t = new HTable(conf, table.getTableName()); try { Result result = t.get(g); assertTrue(!result.isEmpty()); @@ -197,7 +198,7 @@ public class TestVisibilityLabelsWithACL { VisibilityClient.clearAuths(conf, auths, user); // Removing all auths if any. VisibilityClient.setAuths(conf, auths, "user1"); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET); + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET); SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER1.getShortName(), tableName, null, null, Permission.Action.READ); SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName, @@ -206,7 +207,7 @@ public class TestVisibilityLabelsWithACL { public Void run() throws Exception { Get g = new Get(row1); g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); - HTable t = new HTable(conf, table.getTableName()); + HTableInterface t = new HTable(conf, table.getTableName()); try { Result result = t.get(g); assertTrue(result.isEmpty()); @@ -307,9 +308,9 @@ public class TestVisibilityLabelsWithACL { assertTrue(authsList.contains(PRIVATE)); } - private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) + private static HTableInterface createTableAndWriteDataWithLabels(TableName tableName, String... 
labelExps) throws Exception { - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); int i = 1; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java index 95db8d7..403e1d9 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -111,12 +112,12 @@ public class TestVisibilityLabelsWithDeletes { public void testVisibilityLabelsWithDeleteColumns() throws Throwable { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + TOPSECRET, + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + TOPSECRET, SECRET); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); @@ -156,13 +157,13 @@ public class TestVisibilityLabelsWithDeletes { public void testVisibilityLabelsWithDeleteFamily() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - final HTable table = 
createTableAndWriteDataWithLabels(tableName, SECRET, CONFIDENTIAL + "|" + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, SECRET, CONFIDENTIAL + "|" + TOPSECRET); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row2); d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL)); d.deleteFamily(fam); @@ -198,12 +199,12 @@ public class TestVisibilityLabelsWithDeletes { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); long[] ts = new long[] { 123l, 125l }; - final HTable table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|" + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|" + TOPSECRET, SECRET); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); @@ -243,12 +244,12 @@ public class TestVisibilityLabelsWithDeletes { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); long[] ts = new long[] { 123l, 125l }; - final HTable table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|" + final HTableInterface table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|" + TOPSECRET, SECRET); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); @@ -287,14 +288,14 @@ public class TestVisibilityLabelsWithDeletes { public void 
testVisibilityLabelsWithDeleteColumnsWithMultipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + SECRET + "&" + TOPSECRET+")")); @@ -348,14 +349,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteColumns(fam, qual); @@ -404,14 +405,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new 
CellVisibility(CONFIDENTIAL)); d.deleteColumns(fam, qual); @@ -464,14 +465,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamily(fam); @@ -517,7 +518,7 @@ public class TestVisibilityLabelsWithDeletes { @Test public void testVisibilityLabelsWithDeleteFamilyWithPutsReAppearing() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -538,7 +539,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamily(fam); @@ -564,7 +565,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET)); d.deleteFamily(fam); @@ -599,7 +600,7 @@ public class TestVisibilityLabelsWithDeletes { @Test 
public void testVisibilityLabelsWithDeleteColumnsWithPutsReAppearing() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -620,7 +621,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteColumns(fam, qual); @@ -646,7 +647,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET)); d.deleteColumns(fam, qual); @@ -681,7 +682,7 @@ public class TestVisibilityLabelsWithDeletes { @Test public void testVisibilityCombinations() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -702,7 +703,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET)); d.deleteColumns(fam, qual, 126l); @@ -737,7 +738,7 @@ public class TestVisibilityLabelsWithDeletes { 
public void testVisibilityLabelsWithDeleteColumnWithSpecificVersionWithPutsReAppearing() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -765,7 +766,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteColumn(fam, qual, 123l); @@ -803,14 +804,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamily(fam); @@ -862,14 +863,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteFamilyAndDeleteColumnsWithAndWithoutVisibilityExp() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, 
TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteFamily(fam); table.delete(d); @@ -925,9 +926,9 @@ public class TestVisibilityLabelsWithDeletes { } } - private HTable doPuts(TableName tableName) throws IOException, InterruptedIOException, + private HTableInterface doPuts(TableName tableName) throws IOException, InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException { - HTable table; + HTableInterface table; Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); colDesc.setMaxVersions(5); @@ -967,9 +968,9 @@ public class TestVisibilityLabelsWithDeletes { return table; } - private HTable doPutsWithDiffCols(TableName tableName) throws IOException, + private HTableInterface doPutsWithDiffCols(TableName tableName) throws IOException, InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException { - HTable table; + HTableInterface table; Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); colDesc.setMaxVersions(5); @@ -1002,9 +1003,9 @@ public class TestVisibilityLabelsWithDeletes { return table; } - private HTable doPutsWithoutVisibility(TableName tableName) throws IOException, + private HTableInterface doPutsWithoutVisibility(TableName tableName) throws IOException, InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException { - HTable table; + HTableInterface table; Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); colDesc.setMaxVersions(5); @@ -1040,14 +1041,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); 
PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + SECRET + "&" + TOPSECRET+")")); @@ -1110,14 +1111,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnWithLatestTimeStampUsingMultipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumn(fam, qual); @@ -1174,7 +1175,7 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnWithLatestTimeStampWhenNoVersionMatches() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); @@ -1185,7 +1186,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET )); d.deleteColumn(fam, qual); @@ -1266,14 +1267,14 @@ public class 
TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumn(fam, qual); @@ -1337,14 +1338,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteFamilyLatestTimeStampWithMulipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteFamily(fam); @@ -1391,14 +1392,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnswithMultipleColumnsWithMultipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPutsWithDiffCols(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface 
table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumns(fam, qual, 125l); @@ -1453,7 +1454,7 @@ public class TestVisibilityLabelsWithDeletes { @Test public void testDeleteColumnsWithDiffColsAndTags() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -1474,7 +1475,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET)); d.deleteColumns(fam, qual, 126l); @@ -1506,7 +1507,7 @@ public class TestVisibilityLabelsWithDeletes { @Test public void testDeleteColumnsWithDiffColsAndTags1() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -1527,7 +1528,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET)); d.deleteColumns(fam, qual, 126l); @@ -1560,14 +1561,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteFamilyWithoutCellVisibilityWithMulipleVersions() throws Exception { setAuths(); TableName tableName = 
TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPutsWithoutVisibility(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteFamily(fam); table.delete(d); @@ -1604,13 +1605,13 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPutsWithoutVisibility(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteFamily(fam); @@ -1671,14 +1672,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteFamilySpecificTimeStampWithMulipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + SECRET + "&" + TOPSECRET + ")")); @@ -1731,14 +1732,14 @@ public class 
TestVisibilityLabelsWithDeletes { public void testScanAfterCompaction() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + SECRET + "&" + TOPSECRET+")")); @@ -1789,14 +1790,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteFamilySpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do not flush here. 
table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -1847,7 +1848,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -1905,12 +1906,12 @@ public class TestVisibilityLabelsWithDeletes { }; VisibilityLabelsResponse response = SUPERUSER.runAs(action); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = doPuts(tableName); + HTableInterface table = doPuts(tableName); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamilyVersion(fam, 123l); @@ -1961,12 +1962,12 @@ public class TestVisibilityLabelsWithDeletes { public void testSpecificDeletesFollowedByDeleteFamily() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = doPuts(tableName); + HTableInterface table = doPuts(tableName); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + 
HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")")); @@ -2011,7 +2012,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamily(fam); @@ -2062,12 +2063,12 @@ public class TestVisibilityLabelsWithDeletes { }; VisibilityLabelsResponse response = SUPERUSER.runAs(action); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = doPuts(tableName); + HTableInterface table = doPuts(tableName); try { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")")); @@ -2113,7 +2114,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(CONFIDENTIAL)); d.deleteFamily(fam); @@ -2154,14 +2155,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do 
not flush here. table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumn(fam, qual, 125l); @@ -2211,7 +2212,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2263,14 +2264,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice1() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do not flush here. 
table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")" + "|(" + TOPSECRET + "&" + SECRET + ")")); @@ -2321,7 +2322,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumn(fam, qual, 127l); @@ -2376,14 +2377,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice2() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do not flush here. 
table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2439,7 +2440,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2496,14 +2497,14 @@ public class TestVisibilityLabelsWithDeletes { throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do not flush here. 
table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET)); d.deleteColumn(fam, qual, 125l); @@ -2553,7 +2554,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2615,14 +2616,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDiffDeleteTypesForTheSameCellUsingMultipleVersions() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { // Do not flush here. 
table = doPuts(tableName); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2673,7 +2674,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET+")")); @@ -2724,14 +2725,14 @@ public class TestVisibilityLabelsWithDeletes { public void testDeleteColumnLatestWithNoCellVisibility() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = doPuts(tableName); TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteColumn(fam, qual, 125l); table.delete(d); @@ -2754,7 +2755,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteColumns(fam, qual, 125l); table.delete(d); @@ -2778,7 +2779,7 @@ public class TestVisibilityLabelsWithDeletes { actiona 
= new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteFamily(fam, 125l); table.delete(d); @@ -2802,7 +2803,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteFamily(fam); table.delete(d); @@ -2826,7 +2827,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteColumns(fam, qual); table.delete(d); @@ -2850,7 +2851,7 @@ public class TestVisibilityLabelsWithDeletes { actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteFamilyVersion(fam, 126l); table.delete(d); @@ -2915,7 +2916,7 @@ public class TestVisibilityLabelsWithDeletes { public void testVisibilityExpressionWithNotEqualORCondition() throws Exception { setAuths(); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin(); HColumnDescriptor colDesc = new HColumnDescriptor(fam); @@ -2936,7 +2937,7 @@ public class TestVisibilityLabelsWithDeletes { PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { try { - HTable table = new HTable(conf, 
TEST_NAME.getMethodName()); + HTableInterface table = new HTable(conf, TEST_NAME.getMethodName()); Delete d = new Delete(row1); d.deleteColumn(fam, qual, 124l); d.setCellVisibility(new CellVisibility(PRIVATE )); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java index 6cf1eba..9690c17 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -81,7 +82,7 @@ public class TestVisibilityLabelsWithSLGStack { @Test public void testWithSAGStack() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, CF); Put put = new Put(ROW_1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java index dfb5c84..a2ce247 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import 
org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; import org.apache.hadoop.hbase.security.User; @@ -125,12 +126,12 @@ public class TestVisibilityWithCheckAuths { HTableDescriptor desc = new HTableDescriptor(tableName); desc.addFamily(colDesc); hBaseAdmin.createTable(desc); - HTable table = null; + HTableInterface table = null; try { TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString()); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Put p = new Put(row1); @@ -167,14 +168,14 @@ public class TestVisibilityWithCheckAuths { }; SUPERUSER.runAs(action); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); - HTable table = null; + HTableInterface table = null; try { table = TEST_UTIL.createTable(tableName, fam); final byte[] row1 = Bytes.toBytes("row1"); final byte[] val = Bytes.toBytes("a"); PrivilegedExceptionAction actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Put put = new Put(row1); @@ -190,7 +191,7 @@ public class TestVisibilityWithCheckAuths { USER.runAs(actiona); actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table = null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Append append = new Append(row1); @@ -205,7 +206,7 @@ public class TestVisibilityWithCheckAuths { USER.runAs(actiona); actiona = new PrivilegedExceptionAction() { public Void run() throws Exception { - HTable table = null; + HTableInterface table 
= null; try { table = new HTable(conf, TEST_NAME.getMethodName()); Append append = new Append(row1); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index 35b961c..e0ee283 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -667,7 +668,7 @@ public class SnapshotTestingUtils { loadData(util, new HTable(util.getConfiguration(), tableName), rows, families); } - public static void loadData(final HBaseTestingUtility util, final HTable table, int rows, + public static void loadData(final HBaseTestingUtility util, final HTableInterface table, int rows, byte[]... 
families) throws IOException, InterruptedException { table.setAutoFlush(false, true); @@ -692,7 +693,7 @@ public class SnapshotTestingUtils { waitForTableToBeOnline(util, table.getName()); } - private static void putData(final HTable table, final byte[][] families, + private static void putData(final HTableInterface table, final byte[][] families, final byte[] key, final byte[] value) throws IOException { byte[] q = Bytes.toBytes("q"); Put put = new Put(key); @@ -722,7 +723,7 @@ public class SnapshotTestingUtils { public static void verifyRowCount(final HBaseTestingUtility util, final TableName tableName, long expectedRows) throws IOException { - HTable table = new HTable(util.getConfiguration(), tableName); + HTableInterface table = new HTable(util.getConfiguration(), tableName); try { assertEquals(expectedRows, util.countRows(table)); } finally { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java index e4e2719..8096e0c 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotFileInfo; @@ -121,7 +122,7 @@ public class TestExportSnapshot { admin.snapshot(emptySnapshotName, tableName); // Add some rows - HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(TEST_UTIL.getConfiguration(), tableName); 
SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY); tableNumFiles = admin.getTableRegions(tableName).size(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java index be12cc5..bb48206 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.ScannerCallable; import org.apache.hadoop.hbase.ipc.RpcClient; import org.apache.hadoop.hbase.ipc.RpcServer; @@ -146,7 +147,7 @@ public class TestFlushSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); // put some stuff in the table - HTable table = new HTable(UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(UTIL.getConfiguration(), TABLE_NAME); SnapshotTestingUtils.loadData(UTIL, table, DEFAULT_NUM_ROWS, TEST_FAM); LOG.debug("FS state before snapshot:"); @@ -185,7 +186,7 @@ public class TestFlushSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); // put some stuff in the table - HTable table = new HTable(UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(UTIL.getConfiguration(), TABLE_NAME); UTIL.loadTable(table, TEST_FAM); LOG.debug("FS state before snapshot:"); @@ -229,7 +230,7 @@ public class TestFlushSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); // put some stuff in the table - HTable table = new HTable(UTIL.getConfiguration(), TABLE_NAME); + HTableInterface table = new HTable(UTIL.getConfiguration(), TABLE_NAME); 
SnapshotTestingUtils.loadData(UTIL, table, DEFAULT_NUM_ROWS, TEST_FAM); LOG.debug("FS state before snapshot:"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java index 08ac6de..48dd88a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; @@ -102,7 +103,7 @@ public class TestRestoreFlushSnapshotFromClient { // create Table and disable it SnapshotTestingUtils.createTable(UTIL, tableName, FAMILY); - HTable table = new HTable(UTIL.getConfiguration(), tableName); + HTableInterface table = new HTable(UTIL.getConfiguration(), tableName); SnapshotTestingUtils.loadData(UTIL, table, 500, FAMILY); snapshot0Rows = UTIL.countRows(table); LOG.info("=== before snapshot with 500 rows"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java index 859bff2..173cc3f 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.Waiter; import 
org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.htrace.Sampler; import org.htrace.Span; @@ -63,7 +64,7 @@ public class TestHTraceHooks { @Test public void testTraceCreateTable() throws Exception { TraceScope tableCreationSpan = Trace.startSpan("creating table", Sampler.ALWAYS); - HTable table; + HTableInterface table; try { table = TEST_UTIL.createTable("table".getBytes(), diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java index 51d2c4a..3cc8ad0 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -120,7 +121,7 @@ public class RestartMetaTest extends AbstractHBaseTool { LOG.debug("Trying to scan meta"); - HTable metaTable = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface metaTable = new HTable(conf, TableName.META_TABLE_NAME); ResultScanner scanner = metaTable.getScanner(new Scan()); Result result; while ((result = scanner.next()) != null) { diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java index dd2bfd4..21db98a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java 
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -108,7 +109,7 @@ public class TestCoprocessorScanPolicy { if (TEST_UTIL.getHBaseAdmin().tableExists(tableName)) { TEST_UTIL.deleteTable(tableName); } - HTable t = TEST_UTIL.createTable(tableName, F, 1); + HTableInterface t = TEST_UTIL.createTable(tableName, F, 1); // set the version override to 2 Put p = new Put(R); p.setAttribute("versions", new byte[]{}); @@ -164,7 +165,7 @@ public class TestCoprocessorScanPolicy { .setTimeToLive(1); desc.addFamily(hcd); TEST_UTIL.getHBaseAdmin().createTable(desc); - HTable t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName); + HTableInterface t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName); long now = EnvironmentEdgeManager.currentTimeMillis(); ManualEnvironmentEdge me = new ManualEnvironmentEdge(); me.setValue(now); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java index 35a36ea..4540f28 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java @@ -76,6 +76,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.MetaScanner; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; @@ -169,7 +170,7 
@@ public class TestHBaseFsck { // Now let's mess it up and change the assignment in hbase:meta to // point to a different region server - HTable meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName(), + HTableInterface meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName(), executorService); Scan scan = new Scan(); scan.setStartRow(Bytes.toBytes(table+",,")); @@ -214,7 +215,7 @@ public class TestHBaseFsck { assertNoErrors(doFsck(conf, false)); // comment needed - what is the purpose of this line - HTable t = new HTable(conf, Bytes.toBytes(table), executorService); + HTableInterface t = new HTable(conf, Bytes.toBytes(table), executorService); ResultScanner s = t.getScanner(new Scan()); s.close(); t.close(); @@ -251,7 +252,7 @@ public class TestHBaseFsck { private HRegionInfo createRegion(Configuration conf, final HTableDescriptor htd, byte[] startKey, byte[] endKey) throws IOException { - HTable meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); HRegionInfo hri = new HRegionInfo(htd.getTableName(), startKey, endKey); MetaTableAccessor.addRegionToMeta(meta, hri); meta.close(); @@ -350,7 +351,7 @@ public class TestHBaseFsck { } if (metaRow) { - HTable meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); Delete delete = new Delete(deleteRow); meta.delete(delete); } @@ -370,7 +371,7 @@ public class TestHBaseFsck { * @throws InterruptedException * @throws KeeperException */ - HTable setupTable(TableName tablename) throws Exception { + HTableInterface setupTable(TableName tablename) throws Exception { return setupTableWithRegionReplica(tablename, 1); } @@ -381,7 +382,7 @@ public class TestHBaseFsck { * @return * @throws Exception */ - HTable setupTableWithRegionReplica(TableName tablename, int replicaCount) throws Exception { + 
HTableInterface setupTableWithRegionReplica(TableName tablename, int replicaCount) throws Exception { HTableDescriptor desc = new HTableDescriptor(tablename); desc.setRegionReplication(replicaCount); HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toString(FAM)); @@ -633,7 +634,7 @@ public class TestHBaseFsck { // asks the master to assign the replica (the meta needs to be injected // for the master to treat the request for assignment as valid; the master // checks the region is valid either from its memory or meta) - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); List regions = TEST_UTIL.getHBaseAdmin().getTableRegions(table); byte[] startKey = Bytes.toBytes("B"); byte[] endKey = Bytes.toBytes("C"); @@ -909,7 +910,7 @@ public class TestHBaseFsck { assertNotNull(regionName); assertNotNull(serverName); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME, executorService); Put put = new Put(regionName); put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes.toBytes(serverName.getHostAndPort())); @@ -1464,7 +1465,7 @@ public class TestHBaseFsck { public void testLingeringSplitParent() throws Exception { TableName table = TableName.valueOf("testLingeringSplitParent"); - HTable meta = null; + HTableInterface meta = null; try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); @@ -1544,7 +1545,7 @@ public class TestHBaseFsck { public void testValidLingeringSplitParent() throws Exception { TableName table = TableName.valueOf("testLingeringSplitParent"); - HTable meta = null; + HTableInterface meta = null; try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); @@ -1594,7 +1595,7 @@ public class TestHBaseFsck { public void testSplitDaughtersNotInMeta() throws Exception { TableName table = TableName.valueOf("testSplitdaughtersNotInMeta"); - HTable meta = 
null; + HTableInterface meta = null; try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); @@ -2146,7 +2147,7 @@ public class TestHBaseFsck { // Mess it up by removing the RegionInfo for one region. final List deletes = new LinkedList(); - HTable meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName()); + HTableInterface meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName()); MetaScanner.metaScan(conf, new MetaScanner.MetaScannerVisitor() { @Override @@ -2450,7 +2451,7 @@ public class TestHBaseFsck { @Test public void testHbckAfterRegionMerge() throws Exception { TableName table = TableName.valueOf("testMergeRegionFilesInHdfs"); - HTable meta = null; + HTableInterface meta = null; try { // disable CatalogJanitor TEST_UTIL.getHBaseCluster().getMaster().setCatalogJanitorEnabled(false); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java index ac28844..93c06fe 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting; @@ -101,7 +102,7 @@ public class TestHBaseFsckEncryption { @Test public void testFsckWithEncryption() throws Exception { // Populate the table with some data - HTable table = new HTable(conf, htd.getName()); + HTableInterface table = new HTable(conf, htd.getName()); try { byte[] values = { 'A', 'B', 'C', 'D' }; for (int i = 0; i < values.length; i++) { 
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java index 88144a7..249b4ee 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTestConst; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.MediumTests; @@ -60,7 +61,8 @@ public class TestProcessBasedCluster { HTestConst.DEFAULT_CF_STR_SET, HColumnDescriptor.DEFAULT_VERSIONS, COLS_PER_ROW, FLUSHES, NUM_REGIONS, ROWS_PER_FLUSH); - HTable table = new HTable(TEST_UTIL.getConfiguration(), HTestConst.DEFAULT_TABLE_BYTES); + HTableInterface table = + new HTable(TEST_UTIL.getConfiguration(), HTestConst.DEFAULT_TABLE_BYTES); ResultScanner scanner = table.getScanner(HTestConst.DEFAULT_CF_BYTES); Result result; int rows = 0; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java index b6785b2..088698a 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import 
org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -136,7 +137,7 @@ public class OfflineMetaRebuildTestCore { } } - private void populateTable(HTable tbl) throws IOException { + private void populateTable(HTableInterface tbl) throws IOException { byte[] values = { 'A', 'B', 'C', 'D' }; for (int i = 0; i < values.length; i++) { for (int j = 0; j < values.length; j++) { @@ -190,7 +191,7 @@ public class OfflineMetaRebuildTestCore { hri.getEncodedName()); fs.delete(p, true); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); Delete delete = new Delete(deleteRow); meta.delete(delete); meta.close(); @@ -203,9 +204,9 @@ public class OfflineMetaRebuildTestCore { dumpMeta(htd); } - protected HRegionInfo createRegion(Configuration conf, final HTable htbl, + protected HRegionInfo createRegion(Configuration conf, final HTableInterface htbl, byte[] startKey, byte[] endKey) throws IOException { - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); HTableDescriptor htd = htbl.getTableDescriptor(); HRegionInfo hri = new HRegionInfo(htbl.getName(), startKey, endKey); @@ -230,7 +231,7 @@ public class OfflineMetaRebuildTestCore { // Mess it up by blowing up meta. 
Admin admin = TEST_UTIL.getHBaseAdmin(); Scan s = new Scan(); - HTable meta = new HTable(conf, TableName.META_TABLE_NAME); + HTableInterface meta = new HTable(conf, TableName.META_TABLE_NAME); ResultScanner scanner = meta.getScanner(s); List dels = new ArrayList(); for (Result r : scanner) { @@ -257,7 +258,7 @@ public class OfflineMetaRebuildTestCore { */ protected int tableRowCount(Configuration conf, TableName table) throws IOException { - HTable t = new HTable(conf, table); + HTableInterface t = new HTable(conf, table); Scan st = new Scan(); ResultScanner rst = t.getScanner(st); @@ -277,7 +278,7 @@ public class OfflineMetaRebuildTestCore { */ protected int scanMeta() throws IOException { int count = 0; - HTable meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName()); + HTableInterface meta = new HTable(conf, HTableDescriptor.META_TABLEDESC.getTableName()); ResultScanner scanner = meta.getScanner(new Scan()); LOG.info("Table: " + Bytes.toString(meta.getTableName())); for (Result res : scanner) { diff --git hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java index bb45c88..43b197e 100644 --- hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java @@ -36,6 +36,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler; import org.apache.hadoop.hbase.thrift.generated.TIncrement; import org.apache.hadoop.hbase.util.Bytes; @@ -265,7 +266,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean { continue; } try { - HTable table = handler.getTable(row.getTable()); + 
HTableInterface table = handler.getTable(row.getTable()); if (failures > 2) { throw new IOException("Auto-Fail rest of ICVs"); } diff --git hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 8583b5c..80a0bb6 100644 --- hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.OperationWithAttributes; import org.apache.hadoop.hbase.client.Put; @@ -528,7 +529,7 @@ public class ThriftServerRunner implements Runnable { /** * The HBaseHandler is a glue object that connects Thrift RPC calls to the - * HBase client API primarily defined in the HBaseAdmin and HTable objects. + * HBase client API primarily defined in the HBaseAdmin and HTableInterface objects. */ public static class HBaseHandler implements Hbase.Iface { protected Configuration conf; @@ -560,7 +561,7 @@ public class ThriftServerRunner implements Runnable { * @param table * @throws IOException */ - byte[][] getAllColumns(HTable table) throws IOException { + byte[][] getAllColumns(HTableInterface table) throws IOException { HColumnDescriptor[] cds = table.getTableDescriptor().getColumnFamilies(); byte[][] columns = new byte[cds.length][]; for (int i = 0; i < cds.length; i++) { @@ -571,11 +572,11 @@ public class ThriftServerRunner implements Runnable { } /** - * Creates and returns an HTable instance from a given table name. + * Creates and returns an HTableInterface instance from a given table name. 
* * @param tableName * name of table - * @return HTable object + * @return HTableInterface object * @throws IOException * @throws IOError */ @@ -791,7 +792,7 @@ public class ThriftServerRunner implements Runnable { byte[] qualifier, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Get get = new Get(getBytes(row)); addAttributes(get, attributes); if (qualifier == null) { @@ -833,7 +834,7 @@ public class ThriftServerRunner implements Runnable { public List getVer(ByteBuffer tableName, ByteBuffer row, byte[] family, byte[] qualifier, int numVersions, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Get get = new Get(getBytes(row)); addAttributes(get, attributes); if (null == qualifier) { @@ -876,7 +877,7 @@ public class ThriftServerRunner implements Runnable { byte[] qualifier, long timestamp, int numVersions, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Get get = new Get(getBytes(row)); addAttributes(get, attributes); if (null == qualifier) { @@ -924,7 +925,7 @@ public class ThriftServerRunner implements Runnable { ByteBuffer tableName, ByteBuffer row, List columns, long timestamp, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); if (columns == null) { Get get = new Get(getBytes(row)); addAttributes(get, attributes); @@ -987,7 +988,7 @@ public class ThriftServerRunner implements Runnable { Map attributes) throws IOError { try { List gets = new ArrayList(rows.size()); - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); if (metrics != null) { metrics.incNumRowKeysInBatchGet(rows.size()); } @@ -1031,7 +1032,7 @@ public class ThriftServerRunner implements Runnable { ByteBuffer column, long timestamp, Map attributes) throws IOError { try { - 
HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Delete delete = new Delete(getBytes(row)); addAttributes(delete, attributes); byte [][] famAndQf = KeyValue.parseColumn(getBytes(column)); @@ -1060,7 +1061,7 @@ public class ThriftServerRunner implements Runnable { ByteBuffer tableName, ByteBuffer row, long timestamp, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Delete delete = new Delete(getBytes(row), timestamp); addAttributes(delete, attributes); table.delete(delete); @@ -1124,7 +1125,7 @@ public class ThriftServerRunner implements Runnable { List mutations, long timestamp, Map attributes) throws IOError, IllegalArgument { - HTable table = null; + HTableInterface table = null; try { table = getTable(tableName); Put put = new Put(getBytes(row), timestamp); @@ -1226,7 +1227,7 @@ public class ThriftServerRunner implements Runnable { puts.add(put); } - HTable table = null; + HTableInterface table = null; try { table = getTable(tableName); if (!puts.isEmpty()) @@ -1258,7 +1259,7 @@ public class ThriftServerRunner implements Runnable { protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row, byte [] family, byte [] qualifier, long amount) throws IOError, IllegalArgument, TException { - HTable table; + HTableInterface table; try { table = getTable(tableName); return table.incrementColumnValue( @@ -1316,7 +1317,7 @@ public class ThriftServerRunner implements Runnable { Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(); addAttributes(scan, attributes); if (tScan.isSetStartRow()) { @@ -1364,7 +1365,7 @@ public class ThriftServerRunner implements Runnable { List columns, Map attributes) throws IOError { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(getBytes(startRow)); addAttributes(scan, 
attributes); if(columns != null && columns.size() != 0) { @@ -1390,7 +1391,7 @@ public class ThriftServerRunner implements Runnable { Map attributes) throws IOError, TException { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(getBytes(startRow), getBytes(stopRow)); addAttributes(scan, attributes); if(columns != null && columns.size() != 0) { @@ -1417,7 +1418,7 @@ public class ThriftServerRunner implements Runnable { Map attributes) throws IOError, TException { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(getBytes(startAndPrefix)); addAttributes(scan, attributes); Filter f = new WhileMatchFilter( @@ -1445,7 +1446,7 @@ public class ThriftServerRunner implements Runnable { List columns, long timestamp, Map attributes) throws IOError, TException { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(getBytes(startRow)); addAttributes(scan, attributes); scan.setTimeRange(0, timestamp); @@ -1472,7 +1473,7 @@ public class ThriftServerRunner implements Runnable { Map attributes) throws IOError, TException { try { - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); Scan scan = new Scan(getBytes(startRow), getBytes(stopRow)); addAttributes(scan, attributes); scan.setTimeRange(0, timestamp); @@ -1501,7 +1502,7 @@ public class ThriftServerRunner implements Runnable { TreeMap columns = new TreeMap(); - HTable table = getTable(tableName); + HTableInterface table = getTable(tableName); HTableDescriptor desc = table.getTableDescriptor(); for (HColumnDescriptor e : desc.getFamilies()) { @@ -1520,7 +1521,7 @@ public class ThriftServerRunner implements Runnable { public List getRowOrBefore(ByteBuffer tableName, ByteBuffer row, ByteBuffer family) throws IOError { try { - HTable table = getTable(getBytes(tableName)); + HTableInterface table = 
getTable(getBytes(tableName)); Result result = table.getRowOrBefore(getBytes(row), getBytes(family)); return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { @@ -1532,7 +1533,7 @@ public class ThriftServerRunner implements Runnable { @Override public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError { try { - HTable table = getTable(TableName.META_TABLE_NAME.getName()); + HTableInterface table = getTable(TableName.META_TABLE_NAME.getName()); byte[] row = getBytes(searchRow); Result startRowResult = table.getRowOrBefore( row, HConstants.CATALOG_FAMILY); @@ -1586,7 +1587,7 @@ public class ThriftServerRunner implements Runnable { } try { - HTable table = getTable(tincrement.getTable()); + HTableInterface table = getTable(tincrement.getTable()); Increment inc = ThriftUtilities.incrementFromThrift(tincrement); table.increment(inc); } catch (IOException e) { @@ -1613,7 +1614,7 @@ public class ThriftServerRunner implements Runnable { } try { - HTable table = getTable(tappend.getTable()); + HTableInterface table = getTable(tappend.getTable()); Append append = ThriftUtilities.appendFromThrift(tappend); Result result = table.append(append); return ThriftUtilities.cellFromHBase(result.rawCells()); @@ -1643,7 +1644,7 @@ public class ThriftServerRunner implements Runnable { throw new IllegalArgument(e.getMessage()); } - HTable table = null; + HTableInterface table = null; try { table = getTable(tableName); byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));