diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 2ed47d5..6cb82a1 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -57,7 +57,7 @@ public class TestImportExport {
   private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
   private static final byte[] QUAL = Bytes.toBytes("q");
   private static final String OUTPUT_DIR = "outputdir";
-  private static final String EXPORT_BATCHING = "100";
+  private static final String EXPORT_BATCHING = "hbase.export.scanner.batch";
 
   private static MiniHBaseCluster cluster;
   private static long now = System.currentTimeMillis();
@@ -102,9 +102,7 @@ public class TestImportExport {
 
     String[] args = new String[] {
         EXPORT_TABLE,
-        OUTPUT_DIR,
-        EXPORT_BATCHING,
-        "1000"
+        OUTPUT_DIR
     };
 
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
@@ -164,6 +162,46 @@ public class TestImportExport {
     assertTrue(job.isSuccessful());
   }
 
+  /**
+   * Test export batching
+   */
+  @Test
+  public void testBatchExport() throws Exception {
+    String BATCH_TABLE = "exportWithBatch";
+    HTableDescriptor desc = new HTableDescriptor(BATCH_TABLE);
+    desc.addFamily(new HColumnDescriptor(FAMILYA)
+        .setMaxVersions(1)
+    );
+    UTIL.getHBaseAdmin().createTable(desc);
+    HTable t = new HTable(UTIL.getConfiguration(), BATCH_TABLE);
+
+    Put p = new Put(ROW1);
+    p.add(FAMILYA, QUAL, now, QUAL);
+    p.add(FAMILYA, QUAL, now+1, QUAL);
+    p.add(FAMILYA, QUAL, now+2, QUAL);
+    p.add(FAMILYA, QUAL, now+3, QUAL);
+    p.add(FAMILYA, QUAL, now+4, QUAL);
+    t.put(p);
+
+    String[] args = new String[] {
+        "-D" + EXPORT_BATCHING + "=10",
+        BATCH_TABLE,
+        OUTPUT_DIR
+    };
+
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    Configuration conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    Job job = Export.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+
+    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+    fs.delete(new Path(OUTPUT_DIR), true);
+  }
+
   @Test
   public void testWithDeletes() throws Exception {
     String EXPORT_TABLE = "exportWithDeletes";
@@ -175,7 +213,7 @@ public class TestImportExport {
     UTIL.getHBaseAdmin().createTable(desc);
     HTable t = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
 
-    Put p = new Put(ROW1);
+    Put p = new Put(ROW2);
     p.add(FAMILYA, QUAL, now, QUAL);
     p.add(FAMILYA, QUAL, now+1, QUAL);
     p.add(FAMILYA, QUAL, now+2, QUAL);
@@ -183,18 +221,16 @@ public class TestImportExport {
     p.add(FAMILYA, QUAL, now+4, QUAL);
     t.put(p);
 
-    Delete d = new Delete(ROW1, now+3, null);
+    Delete d = new Delete(ROW2, now+3, null);
     t.delete(d);
-    d = new Delete(ROW1);
+    d = new Delete(ROW2);
     d.deleteColumns(FAMILYA, QUAL, now+2);
     t.delete(d);
 
     String[] args = new String[] {
         "-D" + Export.RAW_SCAN + "=true",
         EXPORT_TABLE,
-        OUTPUT_DIR,
-        EXPORT_BATCHING,
-        "1000"
+        OUTPUT_DIR
     };
 
    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
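
For context on what the new testBatchExport case exercises, here is a minimal client-side sketch of scanner batching, assuming the Export job forwards the value of "hbase.export.scanner.batch" to Scan.setBatch() when it builds its scan. The table name and batch size below are placeholders echoing the test; this sketch is not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchedScanSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "exportWithBatch"); // placeholder table name
    try {
      Scan scan = new Scan();
      scan.setMaxVersions();  // read all cell versions, as an export-style scan would
      scan.setBatch(10);      // return at most 10 cells per Result for wide rows
      ResultScanner scanner = table.getScanner(scan);
      try {
        for (Result r : scanner) {
          // A row with more than 10 cells shows up as several Result objects here.
          System.out.println("row=" + Bytes.toStringBinary(r.getRow())
              + " cells=" + r.size());
        }
      } finally {
        scanner.close();
      }
    } finally {
      table.close();
    }
  }
}

The argument shape the new test builds ("-D" + EXPORT_BATCHING + "=10" followed by the table and output directory) corresponds to passing -D hbase.export.scanner.batch=10 ahead of the positional arguments when invoking Export, replacing the old trailing batching arguments that the other tests no longer pass.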