Index: hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(revision 1365880)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(working copy)
@@ -53,6 +53,7 @@
   private static final Log LOG = LogFactory.getLog(Export.class);
   final static String NAME = "export";
   final static String RAW_SCAN="hbase.mapreduce.include.deleted.rows";
+  final static String EXPORT_BATCHING = "hbase.mapreduce.export.batch";
 
   /**
    * Mapper.
@@ -133,6 +134,16 @@
     if (exportFilter!= null) {
       LOG.info("Setting Scan Filter for Export.");
       s.setFilter(exportFilter);
+    }
+
+    // Optional scan batching for very wide rows; -1 (the default) leaves batching unset.
+    // Parsing happens inside the try so a malformed hbase.mapreduce.export.batch value
+    // is logged and skipped instead of failing task setup with NumberFormatException.
+    try {
+      int batching = Integer.parseInt(conf.get(EXPORT_BATCHING, "-1"));
+      if (batching != -1) {
+        s.setBatch(batching);
+      }
+    } catch (Exception e) {
+      LOG.error("Batching is not set because : " + e.toString());
     }
     LOG.info("versions=" + versions + ", starttime=" + startTime
       + ", endtime=" + endTime + ", keepDeletedCells=" + raw);
@@ -174,6 +185,8 @@
       + "  -Dhbase.client.scanner.caching=100\n"
       + "  -Dmapred.map.tasks.speculative.execution=false\n"
       + "  -Dmapred.reduce.tasks.speculative.execution=false");
+    System.err.println("For very wide rows consider setting the scan batching property as below:\n"
+        + "  -Dhbase.mapreduce.export.batch=10");
   }
 
   /**