Index: hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java	(revision 1375895)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java	(working copy)
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
@@ -44,7 +45,6 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import static org.junit.Assert.assertEquals;
 
 @Category(MediumTests.class)
 public class TestImportExport {
@@ -142,7 +142,26 @@
     assertEquals(3, r.size());
   }
 
+  /**
+   * Test export of the .META. table.
+   *
+   * @throws Exception
+   */
   @Test
+  public void testMetaExport() throws Exception {
+    String EXPORT_TABLE = ".META.";
+    String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1", "0", "0" };
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    Configuration conf = opts.getConfiguration();
+    args = opts.getRemainingArgs();
+
+    Job job = Export.createSubmittableJob(conf, args);
+    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    job.waitForCompletion(false);
+    assertTrue(job.isSuccessful());
+  }
+
+  @Test
   public void testWithDeletes() throws Exception {
     String EXPORT_TABLE = "exportWithDeletes";
     HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java	(revision 1375895)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java	(working copy)
@@ -32,11 +32,14 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -156,9 +159,16 @@
         context.getConfiguration().get("hbase.nameserver.address", null);
 
     Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
-    if (keys == null || keys.getFirst() == null ||
-        keys.getFirst().length == 0) {
-      throw new IOException("Expecting at least one region.");
+    if (keys == null || keys.getFirst() == null || keys.getFirst().length == 0) {
+      // Single-region case (e.g. .META.): getStartEndKeys() yields no keys,
+      // so locate the lone region directly and emit one split for it.
+      HRegionLocation regLoc = table.getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);
+      if (null == regLoc) {
+        throw new IOException("Expecting at least one region.");
+      }
+      List<InputSplit> splits = new ArrayList<InputSplit>(1);
+      InputSplit split = new TableSplit(table.getTableName(), HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
+          regLoc.getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0]);
+      splits.add(split);
+      return splits;
     }
     List<InputSplit> splits = new ArrayList<InputSplit>(keys.getFirst().length);
     for (int i = 0; i < keys.getFirst().length; i++) {
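
For reference, the export path that the new testMetaExport exercises can also be driven outside the test harness via Export.createSubmittableJob, which the patch calls directly. Below is a minimal sketch; the MetaExportDriver class name and the /tmp/meta-export output path are illustrative assumptions, not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.Export;
import org.apache.hadoop.mapreduce.Job;

// Hypothetical standalone driver mirroring testMetaExport's argument layout:
// <tablename> <outputdir> <versions> <starttime> <endtime>
public class MetaExportDriver {
  public static void main(String[] argv) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // "/tmp/meta-export" is an assumed output path; versions=1,
    // starttime=0, endtime=0 match the values used by the test.
    String[] args = new String[] { ".META.", "/tmp/meta-export", "1", "0", "0" };
    Job job = Export.createSubmittableJob(conf, args);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

Before the TableInputFormatBase change above, this job would fail with "Expecting at least one region." because .META. is a single-region table whose start/end key pair comes back empty; the new branch produces the one split needed.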