Index: src/java/org/apache/hadoop/hbase/client/Delete.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/Delete.java (revision 890549)
+++ src/java/org/apache/hadoop/hbase/client/Delete.java (working copy)
@@ -343,4 +343,32 @@
}
}
}
+
+ /**
+ * Delete all versions of the specified column, given in
+ * family:qualifier notation, and with a timestamp less than
+ * or equal to the specified timestamp.
+ * @param column colon-delimited family and qualifier
+ * @param timestamp maximum version timestamp
+ * @deprecated use {@link #deleteColumns(byte[], byte[], long)} instead
+ */
+ public Delete deleteColumns(byte [] column, long timestamp) {
+ byte [][] parts = KeyValue.parseColumn(column);
+ this.deleteColumns(parts[0], parts[1], timestamp);
+ return this;
+ }
+
+ /**
+ * Delete the latest version of the specified column, given in
+ * family:qualifier notation.
+ * @param column colon-delimited family and qualifier
+ * @deprecated use {@link #deleteColumn(byte[], byte[])} instead
+ */
+ public Delete deleteColumn(byte [] column) {
+ byte [][] parts = KeyValue.parseColumn(column);
+ this.deleteColumn(parts[0], parts[1], HConstants.LATEST_TIMESTAMP);
+ return this;
+ }
+
+
}
Index: src/java/org/apache/hadoop/hbase/client/Put.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/Put.java (revision 890549)
+++ src/java/org/apache/hadoop/hbase/client/Put.java (working copy)
@@ -519,4 +519,18 @@
}
}
}
+
+ /**
+ * Add the specified column and value, with the specified timestamp as
+ * its version to this Put operation.
+ * @param column Old style column name with family and qualifier put together
+ * with a colon.
+ * @param ts version timestamp
+ * @param value column value
+ * @deprecated use {@link #add(byte[], byte[], long, byte[])} instead
+ */
+ public Put add(byte [] column, long ts, byte [] value) {
+ byte [][] parts = KeyValue.parseColumn(column);
+ return add(parts[0], parts[1], ts, value);
+ }
}
Index: src/java/org/apache/hadoop/hbase/client/Get.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/Get.java (revision 890549)
+++ src/java/org/apache/hadoop/hbase/client/Get.java (working copy)
@@ -131,7 +131,7 @@
familyMap.put(family, set);
return this;
}
-
+
/**
* Get versions of columns only within the specified timestamp range,
* [minStamp, maxStamp).
@@ -397,4 +397,38 @@
throw new RuntimeException("Can't find class " + className);
}
}
+
+ /**
+ * Adds an array of columns specified in the old format, family:qualifier.
+ *
+ * Overrides previous calls to addFamily for any families in the input.
+ * @param columns array of columns, formatted as family:qualifier
+ * @deprecated issue multiple {@link #addColumn(byte[], byte[])} instead
+ */
+ public Get addColumns(byte [][] columns) {
+ if (columns == null) return this;
+ for(int i = 0; i < columns.length; i++) {
+ try {
+ addColumn(columns[i]);
+ } catch(Exception e) {}
+ }
+ return this;
+ }
+
+ /**
+ *
+ * @param column Old format column.
+ * @return This.
+ * @deprecated use {@link #addColumn(byte[], byte[])} instead
+ */
+ public Get addColumn(final byte [] column) {
+ if (column == null) return this;
+ byte [][] split = KeyValue.parseColumn(column);
+ if (split.length > 1 && split[1] != null && split[1].length > 0) {
+ addColumn(split[0], split[1]);
+ } else {
+ addFamily(split[0]);
+ }
+ return this;
+ }
}
Index: src/java/org/apache/hadoop/hbase/client/Scan.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/Scan.java (revision 890549)
+++ src/java/org/apache/hadoop/hbase/client/Scan.java (working copy)
@@ -30,6 +30,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;
@@ -529,4 +530,91 @@
}
}
}
+
+ /**
+ * Parses a combined family and qualifier and adds either both or just the
+ * family in case there is not qualifier. This assumes the older colon
+ * divided notation, e.g. "data:contents" or "meta:".
+ *
+ * Note: It will throw an error when the colon is missing.
+ *
+ * @param familyAndQualifier
+ * @return A reference to this instance.
+ * @throws IllegalArgumentException When the colon is missing.
+ * @deprecated use {@link #addColumn(byte[], byte[])} instead
+ */
+ public Scan addColumn(byte[] familyAndQualifier) {
+ byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
+ if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
+ addColumn(fq[0], fq[1]);
+ } else {
+ addFamily(fq[0]);
+ }
+ return this;
+ }
+
+ /**
+ * Adds an array of columns specified using the old format, family:qualifier.
+ *
+ * Overrides previous calls to addFamily for any families in the input.
+ *
+ * @param columns array of columns, formatted as family:qualifier
+ * @deprecated issue multiple {@link #addColumn(byte[], byte[])} instead
+ */
+ public Scan addColumns(byte [][] columns) {
+ for (int i = 0; i < columns.length; i++) {
+ addColumn(columns[i]);
+ }
+ return this;
+ }
+
+ /**
+ * Convenience method to help parse old style (or rather user entry on the
+ * command line) column definitions, e.g. "data:contents mime:". The columns
+ * must be space delimited and always have a colon (":") to denote family
+ * and qualifier.
+ *
+ * @param columns The columns to parse.
+ * @return A reference to this instance.
+ * @deprecated use {@link #addColumn(byte[], byte[])} instead
+ */
+ public Scan addColumns(String columns) {
+ String[] cols = columns.split(" ");
+ for (String col : cols) {
+ addColumn(Bytes.toBytes(col));
+ }
+ return this;
+ }
+
+ /**
+ * Helps to convert the binary column families and qualifiers to a text
+ * representation, e.g. "data:mimetype data:contents meta:". Binary values
+ * are properly encoded using {@link Bytes#toBytesBinary(String)}.
+ *
+ * @return The columns in an old style string format.
+ * @deprecated
+ */
+ public String getInputColumns() {
+ String cols = "";
+ for (Map.Entry