Index: src/main/java/org/apache/hadoop/hbase/client/Delete.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/Delete.java	(revision 1211760)
+++ src/main/java/org/apache/hadoop/hbase/client/Delete.java	(working copy)
@@ -119,6 +119,24 @@
   }
 
   /**
+   * Advanced use only. Create a Delete object based on a KeyValue
+   * of type "delete".
+   * @param kv
+   * @throws IOException
+   */
+  public Delete(KeyValue kv) throws IOException {
+    this(kv.getRow(), kv.getTimestamp(), null);
+    if (!kv.isDelete()) {
+      throw new IOException("The recently added KeyValue is not of type delete");
+    }
+    // can't use singletonList, because this might be modified at the server by
+    // coprocessors
+    ArrayList<KeyValue> list = new ArrayList<KeyValue>(1);
+    list.add(kv);
+    familyMap.put(kv.getFamily(), list);
+  }
+
+  /**
    * Delete all versions of all columns of the specified family.
    * <p>
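A minimal usage sketch of the new constructor, outside of its use in Import below (not part of the patch; the row/family/qualifier values and the htable variable are invented for illustration):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.util.Bytes;

    // Rebuild a client-side Delete from a delete-type KeyValue, e.g. one
    // handed back by a raw scan. Non-delete KeyValues are rejected.
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), System.currentTimeMillis(), KeyValue.Type.Delete);
    Delete d = new Delete(kv);  // throws IOException if kv.isDelete() is false
    htable.delete(d);           // htable: an org.apache.hadoop.hbase.client.HTable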
   * Overrides previous calls to deleteColumn and deleteColumns for the
Index: src/main/java/org/apache/hadoop/hbase/client/Scan.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/Scan.java	(revision 1211760)
+++ src/main/java/org/apache/hadoop/hbase/client/Scan.java	(working copy)
@@ -22,7 +22,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -165,6 +164,9 @@
         addFamily(fam);
       }
     }
+    for (Map.Entry<String, byte[]> attr : scan.getAttributesMap().entrySet()) {
+      setAttribute(attr.getKey(), attr.getValue());
+    }
   }
 
   /**
Index: src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java	(revision 1211760)
+++ src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java	(working copy)
@@ -27,6 +27,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -47,7 +49,7 @@
    * Write table content out to files in hdfs.
    */
   static class Importer
-  extends TableMapper<ImmutableBytesWritable, Put> {
+  extends TableMapper<ImmutableBytesWritable, Mutation> {
     private Map<byte[], byte[]> cfRenameMap;
 
     /**
@@ -63,15 +65,15 @@
       Context context)
     throws IOException {
       try {
-        context.write(row, resultToPut(row, value));
+        writeResult(row, value, context);
       } catch (InterruptedException e) {
         e.printStackTrace();
       }
     }
 
-    private Put resultToPut(ImmutableBytesWritable key, Result result)
-    throws IOException {
-      Put put = new Put(key.get());
+    private void writeResult(ImmutableBytesWritable key, Result result, Context context)
+    throws IOException, InterruptedException {
+      Put put = null;
       for (KeyValue kv : result.raw()) {
         if(cfRenameMap != null) {
           // If there's a rename mapping for this CF, create a new KeyValue
@@ -93,11 +95,20 @@
                     kv.getValueLength());     // value length
           }
         }
-        put.add(kv);
+        if (kv.isDelete()) {
+          context.write(key, new Delete(kv));
+        } else {
+          if (put == null) {
+            put = new Put(key.get());
+          }
+          put.add(kv);
+        }
       }
-      return put;
+      if (put != null) {
+        context.write(key, put);
+      }
     }
-
+
     @Override
     public void setup(Context context) {
       // Make a map from sourceCfName to destCfName by parsing a config key
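A note on why the Scan.java change travels with this patch: Scan.setRaw() appears to store the raw flag as a Scan attribute, so the copy constructor has to carry attributes across for a raw Export scan to survive copying. A quick sketch of the fixed behavior (the attribute name is invented):

    Scan scan = new Scan();
    scan.setAttribute("my.marker", Bytes.toBytes("v1"));  // hypothetical attribute
    Scan copy = new Scan(scan);
    // Without the copy-constructor fix above, getAttribute returned null here;
    // with it, the attribute survives the copy.
    byte[] v = copy.getAttribute("my.marker");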
Index: src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(revision 1211760)
+++ src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java	(working copy)
@@ -48,6 +48,7 @@
 public class Export {
   private static final Log LOG = LogFactory.getLog(Export.class);
   final static String NAME = "export";
+  private final static String RAW_SCAN="hbase.mapreduce.include.deleted.rows";
 
   /**
    * Mapper.
@@ -115,7 +116,10 @@
     // Set cache blocks
     s.setCacheBlocks(false);
     // Set Scan Column Family
-    if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
+    boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
+    if (raw) {
+      s.setRaw(raw);
+    } else if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
       s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
     }
     // Set RowFilter or Prefix Filter if applicable.
@@ -124,8 +128,8 @@
       LOG.info("Setting Scan Filter for Export.");
       s.setFilter(exportFilter);
     }
-    LOG.info("verisons=" + versions + ", starttime=" + startTime +
-      ", endtime=" + endTime);
+    LOG.info("versions=" + versions + ", starttime=" + startTime +
+      ", endtime=" + endTime + ", keepDeletedCells=" + raw);
     return s;
   }
 
@@ -159,6 +163,7 @@
     System.err.println("  Additionally, the following SCAN properties can be specified");
     System.err.println("  to control/limit what is exported..");
     System.err.println("  -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=<familyName>");
+    System.err.println("  -D " + RAW_SCAN + "=true");
   }
 
  /**
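Rough usage for the new knob, following Export/Import's existing argument conventions (table and directory names invented):

    $ bin/hbase org.apache.hadoop.hbase.mapreduce.Export \
        -D hbase.mapreduce.include.deleted.rows=true MyTable /export/MyTable
    $ bin/hbase org.apache.hadoop.hbase.mapreduce.Import MyTable /export/MyTable

With the raw scan enabled the export carries delete markers along with the data, and the Importer above replays each one as a Delete instead of dropping it. Two caveats implied by the code: raw mode takes the place of the SCAN_COLUMN_FAMILY branch, so the family restriction is ignored when the flag is set; and delete markers can only be exported while they still exist, i.e. before a major compaction cleans them up (or if the column family retains deleted cells).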