Index: src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(revision 9880)
+++ src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(revision 9881)
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -83,12 +84,19 @@
     job.setJarByClass(Exporter.class);
     // TODO: Allow passing filter and subset of rows/columns.
     Scan s = new Scan();
+    // Optional arguments.
     int versions = args.length > 2? Integer.parseInt(args[2]): 1;
     s.setMaxVersions(versions);
     long startTime = args.length > 3? Long.parseLong(args[3]): 0L;
     long endTime = args.length > 4? Long.parseLong(args[4]): Long.MAX_VALUE;
     s.setTimeRange(startTime, endTime);
+    s.setCacheBlocks(false);
+
+    if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
+      s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
+    }
+    Log.info("versions=" + versions + ", starttime=" + startTime + ", endtime=" + endTime);
     TableMapReduceUtil.initTableMapperJob(tableName, s, Exporter.class, null,
@@ -109,8 +117,16 @@
     if (errorMsg != null && errorMsg.length() > 0) {
       System.err.println("ERROR: " + errorMsg);
     }
-    System.err.println("Usage: Export <tablename> <outputdir> [<versions> " +
-      "[<starttime> [<endtime>]]]");
+    System.err.println("Usage: Export [-D <property=value>]* <tablename> <outputdir> [<versions> " +
+      "[<starttime> [<endtime>]]]\n");
+    System.err.println("  Note: -D properties will be applied to the conf used. ");
+    System.err.println("  For example: ");
+    System.err.println("   -D mapred.output.compress=true");
+    System.err.println("   -D mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec");
+    System.err.println("   -D mapred.output.compression.type=BLOCK");
+    System.err.println("  Additionally, the following SCAN properties can be specified");
+    System.err.println("  to control/limit what is exported:");
+    System.err.println("   -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=<familyName>");
   }

   /**