diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
index 062779a..b4bc4a2 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
@@ -127,7 +127,7 @@ public class MetaScanner {
    * @param metaTableName Meta table to scan, root or meta.
    * @throws IOException e
    */
-  static void metaScan(Configuration configuration, ClusterConnection connection,
+  static void metaScan(Configuration configuration, Connection connection,
       final MetaScannerVisitor visitor, final TableName tableName,
       final byte[] row, final int rowLimit, final TableName metaTableName)
       throws IOException {
@@ -140,9 +140,9 @@ public class MetaScanner {
     // Calculate startrow for scan.
     byte[] startRow;
     ResultScanner scanner = null;
-    HTable metaTable = null;
+    Table metaTable = null;
     try {
-      metaTable = new HTable(TableName.META_TABLE_NAME, connection, null);
+      metaTable = connection.getTable(TableName.META_TABLE_NAME);
       if (row != null) {
         // Scan starting at a particular row in a particular table
         byte[] searchRow = HRegionInfo.createRegionName(tableName, row, HConstants.NINES, false);
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
index ed4cf34..999ec63 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
@@ -26,7 +26,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -46,10 +48,14 @@ import org.apache.hadoop.mapred.Reporter;
  * class ExampleTIF extends TableInputFormatBase implements JobConfigurable {
  *
  *   public void configure(JobConf job) {
- *     HTable exampleTable = new HTable(HBaseConfiguration.create(job),
- *       Bytes.toBytes("exampleTable"));
+ *     Connection conn =
+ *       ConnectionFactory.createConnection(HBaseConfiguration.create(job));
+ *     TableName tableName = TableName.valueOf("exampleTable");
+ *     Table table = conn.getTable(tableName);
+ *     RegionLocator regionLocator = conn.getRegionLocator(tableName);
 *     // mandatory
- *     setHTable(exampleTable);
+ *     setTable(table);
+ *     setRegionLocator(regionLocator);
 *     byte [][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
 *       Bytes.toBytes("columnB") };
 *     // mandatory
@@ -72,7 +78,8 @@ public abstract class TableInputFormatBase implements InputFormat<ImmutableBytesWritable, Result> {
   private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
   private byte [][] inputColumns;
-  private HTable table;
+  private Table table;
+  private RegionLocator regionLocator;
   private TableRecordReader tableRecordReader;
   private Filter rowFilter;
@@ -94,7 +101,7 @@ implements InputFormat<ImmutableBytesWritable, Result> {
     }
     trr.setStartRow(tSplit.getStartRow());
     trr.setEndRow(tSplit.getEndRow());
-    trr.setHTable(this.table);
+    trr.setTable(this.table);
     trr.setInputColumns(this.inputColumns);
     trr.setRowFilter(this.rowFilter);
     trr.init();
@@ -122,7 +129,10 @@ implements InputFormat<ImmutableBytesWritable, Result> {
     if (this.table == null) {
       throw new IOException("No table was provided");
     }
-    byte [][] startKeys = this.table.getStartKeys();
+    if (this.regionLocator == null) {
+      throw new IOException("No regionLocator was provided");
+    }
+    byte [][] startKeys = this.regionLocator.getStartKeys();
     if (startKeys == null || startKeys.length == 0) {
       throw new IOException("Expecting at least one region");
     }
@@ -137,7 +147,7 @@ implements InputFormat<ImmutableBytesWritable, Result> {
     for (int i = 0; i < realNumSplits; i++) {
       int lastPos = startPos + middle;
       lastPos = startKeys.length % realNumSplits > i ? lastPos + 1 : lastPos;
-      String regionLocation = table.getRegionLocation(startKeys[startPos]).
+      String regionLocation = regionLocator.getRegionLocation(startKeys[startPos]).
         getHostname();
       splits[i] = new TableSplit(this.table.getName(), startKeys[startPos],
         ((i + 1) < realNumSplits) ? startKeys[lastPos]:
@@ -158,8 +168,9 @@ implements InputFormat<ImmutableBytesWritable, Result> {
   /**
    * Allows subclasses to get the {@link HTable}.
    */
+  @Deprecated
   protected HTable getHTable() {
-    return this.table;
+    return (HTable) this.table;
   }

   /**
@@ -167,8 +178,10 @@ implements InputFormat<ImmutableBytesWritable, Result> {
    *
    * @param table to get the data from
    */
+  @Deprecated
   protected void setHTable(HTable table) {
     this.table = table;
+    this.regionLocator = table;
   }

   /**
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
index a9496a1..ebe8070 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -61,10 +60,20 @@ implements RecordReader<ImmutableBytesWritable, Result> {
   }

   /**
-   * @param htable the {@link HTable} to scan.
+   * @param table the {@link Table} to scan.
+   *
+   * @deprecated Use setTable() instead.
    */
-  public void setHTable(Table htable) {
-    this.recordReaderImpl.setHTable(htable);
+  @Deprecated
+  public void setHTable(Table table) {
+    this.setTable(table);
+  }
+
+  /**
+   * @param table the {@link Table} to scan.
+   */
+  public void setTable(Table table) {
+    this.recordReaderImpl.setHTable(table);
   }

   /**
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
index 7517c1f..e75ea89 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
@@ -53,7 +53,7 @@ public class TableRecordReaderImpl {
   private byte [] lastSuccessfulRow;
   private Filter trrRowFilter;
   private ResultScanner scanner;
-  private Table htable;
+  private Table table;
   private byte [][] trrInputColumns;
   private long timestamp;
   private int rowcount;
@@ -74,7 +74,7 @@ public class TableRecordReaderImpl {
         TableInputFormat.addColumns(scan, trrInputColumns);
         scan.setFilter(trrRowFilter);
         scan.setCacheBlocks(false);
-        this.scanner = this.htable.getScanner(scan);
+        this.scanner = this.table.getScanner(scan);
         currentScan = scan;
       } else {
         LOG.debug("TIFB.restart, firstRow: " +
@@ -82,7 +82,7 @@ public class TableRecordReaderImpl {
             Bytes.toStringBinary(endRow));
         Scan scan = new Scan(firstRow, endRow);
         TableInputFormat.addColumns(scan, trrInputColumns);
-        this.scanner = this.htable.getScanner(scan);
+        this.scanner = this.table.getScanner(scan);
         currentScan = scan;
       }
     } else {
@@ -92,7 +92,7 @@ public class TableRecordReaderImpl {
       Scan scan = new Scan(firstRow);
       TableInputFormat.addColumns(scan, trrInputColumns);
       scan.setFilter(trrRowFilter);
-      this.scanner = this.htable.getScanner(scan);
+      this.scanner = this.table.getScanner(scan);
       currentScan = scan;
     }
     if (logScannerActivity) {
@@ -115,14 +115,14 @@ public class TableRecordReaderImpl {
     return this.startRow;
   }
   /**
-   * @param htable the {@link HTable} to scan.
+   * @param table the {@link Table} to scan.
    */
-  public void setHTable(Table htable) {
-    Configuration conf = htable.getConfiguration();
+  public void setHTable(Table table) {
+    Configuration conf = table.getConfiguration();
     logScannerActivity = conf.getBoolean(
       ScannerCallable.LOG_SCANNER_ACTIVITY, false);
     logPerRowCount = conf.getInt(LOG_PER_ROW_COUNT, 100);
-    this.htable = htable;
+    this.table = table;
   }

   /**
@@ -157,7 +157,7 @@ public class TableRecordReaderImpl {
   public void close() {
     this.scanner.close();
     try {
-      this.htable.close();
+      this.table.close();
     } catch (IOException ioe) {
       LOG.warn("Error closing table", ioe);
     }
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 15036ee..659bb44 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -28,7 +28,11 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
@@ -99,7 +103,7 @@ public abstract class MultiTableInputFormatBase extends
       sc.setStartRow(tSplit.getStartRow());
       sc.setStopRow(tSplit.getEndRow());
       trr.setScan(sc);
-      trr.setHTable(table);
+      trr.setTable(table);
     } catch (IOException ioe) {
       // If there is an exception make sure that all
       // resources are closed and released.
@@ -127,31 +131,37 @@ public abstract class MultiTableInputFormatBase extends
     List<InputSplit> splits = new ArrayList<InputSplit>();
     for (Scan scan : scans) {
-      byte[] tableName = scan.getAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME);
-      if (tableName == null)
+      byte[] tableNameBytes = scan.getAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME);
+      if (tableNameBytes == null)
         throw new IOException("A scan object did not have a table name");
-      HTable table = null;
+      TableName tableName = TableName.valueOf(tableNameBytes);
+      Table table = null;
+      RegionLocator regionLocator = null;
+      Connection conn = null;
       try {
-        table = new HTable(context.getConfiguration(), tableName);
-        Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
+        conn = ConnectionFactory.createConnection(context.getConfiguration());
+        table = conn.getTable(tableName);
+        regionLocator = conn.getRegionLocator(tableName);
+        Pair<byte[][], byte[][]> keys = regionLocator.getStartEndKeys();
         if (keys == null || keys.getFirst() == null ||
             keys.getFirst().length == 0) {
           throw new IOException("Expecting at least one region for table : "
-              + Bytes.toString(tableName));
+              + tableName.getNameAsString());
         }
         int count = 0;

         byte[] startRow = scan.getStartRow();
         byte[] stopRow = scan.getStopRow();

         RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(table);

         for (int i = 0; i < keys.getFirst().length; i++) {
           if (!includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) {
             continue;
           }
-          HRegionLocation hregionLocation = table.getRegionLocation(keys.getFirst()[i], false);
+          HRegionLocation hregionLocation = regionLocator.getRegionLocation(
+              keys.getFirst()[i], false);
           String regionHostname = hregionLocation.getHostname();
           HRegionInfo regionInfo = hregionLocation.getRegionInfo();
@@ -181,6 +192,8 @@ public abstract class MultiTableInputFormatBase extends
         }
       } finally {
         if (null != table) table.close();
+        if (null != regionLocator) regionLocator.close();
+        if (null != conn) conn.close();
       }
     }
     return splits;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 7416093..edc1ae0 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -34,8 +34,10 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,7 +50,7 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;

 /**
  * A base for {@link TableInputFormat}s. Receives a {@link HTable}, an
@@ -60,10 +62,14 @@ import org.apache.hadoop.util.StringUtils;
  * class ExampleTIF extends TableInputFormatBase implements JobConfigurable {
  *
  *   public void configure(JobConf job) {
- *     HTable exampleTable = new HTable(HBaseConfiguration.create(job),
- *       Bytes.toBytes("exampleTable"));
+ *     Connection conn =
+ *       ConnectionFactory.createConnection(HBaseConfiguration.create(job));
+ *     TableName tableName = TableName.valueOf("exampleTable");
+ *     Table table = conn.getTable(tableName);
+ *     RegionLocator regionLocator = conn.getRegionLocator(tableName);
 *     // mandatory
- *     setHTable(exampleTable);
+ *     setTable(table);
+ *     setRegionLocator(regionLocator);
 *     byte [][] inputColumns = new byte [][] { Bytes.toBytes("cf1:columnA"),
 *       Bytes.toBytes("cf2") };
 *     // mandatory
@@ -88,7 +94,9 @@ extends InputFormat<ImmutableBytesWritable, Result> {
   /** Holds the details for the internal scanner. */
   private Scan scan = null;
   /** The table to scan. */
-  private HTable table = null;
+  private Table table;
+  /** The regionLocator of the table. */
+  private RegionLocator regionLocator;
   /** The reader scanning the table, can be a custom one. */
   private TableRecordReader tableRecordReader = null;
@@ -116,13 +124,16 @@ extends InputFormat<ImmutableBytesWritable, Result> {
   public RecordReader<ImmutableBytesWritable, Result> createRecordReader(
       InputSplit split, TaskAttemptContext context)
       throws IOException {
-    if (table == null) {
-      throw new IOException("Cannot create a record reader because of a" +
-          " previous error. Please look at the previous logs lines from" +
-          " the task's full log for more details.");
+    if (this.table == null) {
+      throw new IOException("No table was provided");
+    }
+    if (this.regionLocator == null) {
+      throw new IOException("No regionLocator was provided");
     }
     TableSplit tSplit = (TableSplit) split;
-    LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");
+    LOG.info("Input split length: " +
+        TraditionalBinaryPrefix.long2String(tSplit.getLength(), "", 1) +
+        " bytes.");
     TableRecordReader trr = this.tableRecordReader;
     // if no table record reader was provided use default
     if (trr == null) {
@@ -132,7 +143,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
     sc.setStartRow(tSplit.getStartRow());
     sc.setStopRow(tSplit.getEndRow());
     trr.setScan(sc);
-    trr.setHTable(table);
+    trr.setTable(table);
     return trr;
   }
@@ -155,12 +166,12 @@ extends InputFormat<ImmutableBytesWritable, Result> {
     this.nameServer =
       context.getConfiguration().get("hbase.nameserver.address", null);

     RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(table);

-    Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
+    Pair<byte[][], byte[][]> keys = regionLocator.getStartEndKeys();
     if (keys == null || keys.getFirst() == null ||
         keys.getFirst().length == 0) {
-      HRegionLocation regLoc = table.getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);
+      HRegionLocation regLoc = regionLocator.getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);
       if (null == regLoc) {
         throw new IOException("Expecting at least one region.");
       }
@@ -177,7 +188,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       if ( !includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) {
         continue;
       }
-      HRegionLocation location = table.getRegionLocation(keys.getFirst()[i], false);
+      HRegionLocation location = regionLocator.getRegionLocation(keys.getFirst()[i], false);
       // The below InetSocketAddress creation does a name resolution.
       InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort());
       if (isa.isUnresolved()) {
@@ -258,18 +269,41 @@ extends InputFormat<ImmutableBytesWritable, Result> {
   /**
    * Allows subclasses to get the {@link HTable}.
+   *
+   * @deprecated Use getTable() and getRegionLocator() instead.
    */
+  @Deprecated
   protected HTable getHTable() {
-    return this.table;
+    return (HTable) this.table;
   }

   /**
    * Allows subclasses to set the {@link HTable}.
+   *
+   * @deprecated Use setTable() and setRegionLocator() instead.
    *
    * @param table The table to get the data from.
    */
+  @Deprecated
   protected void setHTable(HTable table) {
     this.table = table;
+    this.regionLocator = table;
+  }
+
+  /**
+   * Allows subclasses to get the {@link Table}.
+   *
+   */
+  protected Table getTable() {
+    return table;
+  }
+
+  /**
+   * Allows subclasses to get the {@link RegionLocator}.
+   *
+   */
+  protected RegionLocator getRegionLocator() {
+    return regionLocator;
   }

   /**
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
index 825729d..0d9d3bc 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
@@ -57,9 +56,19 @@ extends RecordReader<ImmutableBytesWritable, Result> {
    * Sets the HBase table.
    *
    * @param htable The {@link HTable} to scan.
+   *
+   * @deprecated Use setTable() instead.
    */
+  @Deprecated
   public void setHTable(Table htable) {
-    this.recordReaderImpl.setHTable(htable);
+    this.setTable(htable);
+  }
+
+  /**
+   * @param table the {@link Table} to scan.
+   */
+  public void setTable(Table table) {
+    this.recordReaderImpl.setHTable(table);
   }

   /**
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java
index 9284f03..3de7948 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java
@@ -17,6 +17,15 @@
  */
 package org.apache.hadoop.hbase.util;

+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -29,16 +38,7 @@ import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
+import org.apache.hadoop.hbase.client.Table;

 /**
  * Computes size of each region for given table and given column families.
@@ -60,12 +60,12 @@ public class RegionSizeCalculator {
   /**
    * Computes size of each region for table and given column families.
    * */
-  public RegionSizeCalculator(HTable table) throws IOException {
+  public RegionSizeCalculator(Table table) throws IOException {
     this(table, new HBaseAdmin(table.getConfiguration()));
   }

   /** ctor for unit testing */
-  RegionSizeCalculator (HTable table, Admin admin) throws IOException {
+  RegionSizeCalculator (Table table, Admin admin) throws IOException {

     try {
       if (!enabled(table.getConfiguration())) {
@@ -73,7 +73,7 @@ public class RegionSizeCalculator {
         return;
       }

-      LOG.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
+      LOG.info("Calculating region sizes for table \"" + table.getName().getNameAsString() + "\".");

       //get regions for table
       Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index 6a6da1f..cb54bfe 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -131,7 +131,7 @@ public class TestTableInputFormat {
         new org.apache.hadoop.hbase.mapred.TableRecordReader();
     trr.setStartRow("aaa".getBytes());
     trr.setEndRow("zzz".getBytes());
-    trr.setHTable(table);
+    trr.setTable(table);
     trr.setInputColumns(columns);
     trr.init();
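
Reviewer note: to make the intended end-state concrete, below is a minimal sketch of how a mapred TableInputFormatBase subclass would be wired up once this patch lands. It assumes the setTable()/setRegionLocator() setters referenced in the updated class javadoc exist on the base class (the hunks above show them being called but not defined), and the class and table names are illustrative only.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.mapred.TableInputFormatBase;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;

/** Illustrative only: obtains Table and RegionLocator from one Connection. */
public class ExampleTIF extends TableInputFormatBase implements JobConfigurable {

  @Override
  public void configure(JobConf job) {
    try {
      Configuration conf = HBaseConfiguration.create(job);
      // One shared, heavyweight Connection; Table and RegionLocator are
      // lightweight handles obtained from it.
      Connection conn = ConnectionFactory.createConnection(conf);
      TableName tableName = TableName.valueOf("exampleTable"); // illustrative name
      // mandatory: hand both handles to the base class
      // (setters assumed per the updated javadoc)
      setTable(conn.getTable(tableName));
      setRegionLocator(conn.getRegionLocator(tableName));
      // mandatory: the columns to scan
      byte[][] inputColumns = new byte[][] { Bytes.toBytes("columnA"),
          Bytes.toBytes("columnB") };
      setInputColumns(inputColumns);
    } catch (IOException e) {
      throw new RuntimeException("Failed to configure input format", e);
    }
  }
}

Note that the Connection is never closed here, mirroring the javadoc example: Table and RegionLocator do not own it, so a production subclass would have to track the Connection and close it when the job finishes.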