diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index d23da64..98f6e9b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -22,7 +22,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InterruptedIOException;
 import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HTable;
@@ -70,7 +68,6 @@ import org.apache.hadoop.hbase.client.BufferedMutatorParams;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
@@ -81,14 +78,12 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
-import org.apache.hadoop.hbase.mapreduce.WALPlayer;
 import org.apache.hadoop.hbase.regionserver.FlushLargeStoresPolicy;
 import org.apache.hadoop.hbase.regionserver.FlushPolicyFactory;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.RegionSplitter;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
@@ -668,85 +663,12 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       return run(inputDir, numMappers);
     }
 
-    /**
-     * WALPlayer override that searches for keys loaded in the setup.
-     */
-    public static class WALSearcher extends WALPlayer {
-      public WALSearcher(Configuration conf) {
-        super(conf);
-      }
-
-      /**
-       * The actual searcher mapper.
-       */
-      public static class WALMapperSearcher extends WALMapper {
-        private SortedSet<byte []> keysToFind;
-        private AtomicInteger rows = new AtomicInteger(0);
-
-        @Override
-        public void setup(Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation>.Context context)
-            throws IOException {
-          super.setup(context);
-          try {
-            this.keysToFind = readKeysToSearch(context.getConfiguration());
-            LOG.info("Loaded keys to find: count=" + this.keysToFind.size());
-          } catch (InterruptedException e) {
-            throw new InterruptedIOException(e.toString());
-          }
-        }
-
-        @Override
-        protected boolean filter(Context context, Cell cell) {
-          // TODO: Can I do a better compare than this copying out key?
-          byte [] row = new byte [cell.getRowLength()];
-          System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
-          boolean b = this.keysToFind.contains(row);
-          if (b) {
-            String keyStr = Bytes.toStringBinary(row);
-            try {
-              LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
-            } catch (IOException|InterruptedException e) {
-              LOG.warn(e);
-            }
-            if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
-              context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
-            }
-            context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
-          }
-          return b;
-        }
-      }
-
-      // Put in place the above WALMapperSearcher.
-      @Override
-      public Job createSubmittableJob(String[] args) throws IOException {
-        Job job = super.createSubmittableJob(args);
-        // Call my class instead.
-        job.setJarByClass(WALMapperSearcher.class);
-        job.setMapperClass(WALMapperSearcher.class);
-        job.setOutputFormatClass(NullOutputFormat.class);
-        return job;
-      }
-    }
-
     static final String FOUND_GROUP_KEY = "Found";
     static final String SEARCHER_INPUTDIR_KEY = "searcher.keys.inputdir";
 
     public int run(Path inputDir, int numMappers) throws Exception {
-      getConf().set(SEARCHER_INPUTDIR_KEY, inputDir.toString());
       SortedSet<byte []> keys = readKeysToSearch(getConf());
-      if (keys.isEmpty()) throw new RuntimeException("No keys to find");
-      LOG.info("Count of keys to find: " + keys.size());
-      for(byte [] key: keys) LOG.info("Key: " + Bytes.toStringBinary(key));
-      Path hbaseDir = new Path(getConf().get(HConstants.HBASE_DIR));
-      // Now read all WALs. In two dirs. Presumes certain layout.
-      Path walsDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);
-      Path oldWalsDir = new Path(hbaseDir, HConstants.HREGION_OLDLOGDIR_NAME);
-      LOG.info("Running Search with keys inputDir=" + inputDir +", numMappers=" + numMappers +
-        " against " + getConf().get(HConstants.HBASE_DIR));
-      int ret = ToolRunner.run(new WALSearcher(getConf()), new String [] {walsDir.toString(), ""});
-      if (ret != 0) return ret;
-      return ToolRunner.run(new WALSearcher(getConf()), new String [] {oldWalsDir.toString(), ""});
+      return IntegrationTestLoadAndVerify.doSearch(getConf(), inputDir, keys);
     }
 
     static SortedSet<byte []> readKeysToSearch(final Configuration conf)
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 9ecb9ca..0c958ba 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
 import org.apache.hadoop.hbase.mapreduce.WALPlayer;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
@@ -501,21 +501,24 @@ public void cleanUpCluster() throws Exception {
 
   private int doSearch(Configuration conf, String keysDir) throws Exception {
     Path inputDir = new Path(keysDir);
-
-    getConf().set(SEARCHER_INPUTDIR_KEY, inputDir.toString());
     SortedSet<byte []> keys = readKeysToSearch(getConf());
+    return doSearch(getConf(), inputDir, keys);
+  }
+
+  public static int doSearch(Configuration conf, Path inputDir, SortedSet<byte []> keys)
+      throws Exception {
     if (keys.isEmpty()) throw new RuntimeException("No keys to find");
+    conf.set(SEARCHER_INPUTDIR_KEY, inputDir.toString());
     LOG.info("Count of keys to find: " + keys.size());
     for(byte [] key: keys) LOG.info("Key: " + Bytes.toStringBinary(key));
-    Path hbaseDir = new Path(getConf().get(HConstants.HBASE_DIR));
+    Path hbaseDir = FSUtils.getRootDir(conf);
     // Now read all WALs. In two dirs. Presumes certain layout.
     Path walsDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);
     Path oldWalsDir = new Path(hbaseDir, HConstants.HREGION_OLDLOGDIR_NAME);
-    LOG.info("Running Search with keys inputDir=" + inputDir +
-        " against " + getConf().get(HConstants.HBASE_DIR));
-    int ret = ToolRunner.run(new WALSearcher(getConf()), new String [] {walsDir.toString(), ""});
+    LOG.info("Running Search with keys inputDir=" + inputDir + " against " + hbaseDir);
+    int ret = ToolRunner.run(new WALSearcher(conf), new String[] {walsDir.toString(), ""});
     if (ret != 0) return ret;
-    return ToolRunner.run(new WALSearcher(getConf()), new String [] {oldWalsDir.toString(), ""});
+    return ToolRunner.run(new WALSearcher(conf), new String[] {oldWalsDir.toString(), ""});
   }
 
   private static void setJobScannerConf(Job job) {