diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 9f8fa55..7b0e52f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -61,7 +61,8 @@ import com.google.common.primitives.Longs;
* KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
* interpreting the content as KeyValue. The KeyValue format inside a byte array is:
* <keylength> <valuelength> <key> <value> Key is further decomposed as:
- * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier> <timestamp> <keytype>
+ * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier>
+ * <timestamp> <keytype>
* The rowlength maximum is Short.MAX_SIZE, column family length maximum
* is Byte.MAX_SIZE, and column qualifier + key length must be <
* Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter,
@@ -1306,10 +1307,10 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
// If no delimiter, return array of size 1
return new byte [][] { c };
} else if(index == c.length - 1) {
- // Only a family, return array size 1
+ // Family with empty qualifier, return array size 2
byte [] family = new byte[c.length-1];
System.arraycopy(c, 0, family, 0, family.length);
- return new byte [][] { family };
+ return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY };
}
// Family and column, return array size 2
final byte [][] result = new byte [2][];
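For review context, a minimal sketch of the contract this hunk establishes (class, method, and constant names are the patch's own; the input values are illustrative):

```java
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseColumnSketch {
  public static void main(String[] args) {
    // Post-patch contract of KeyValue.parseColumn:
    //   "fam"      -> { fam }                              (no delimiter: family only)
    //   "fam:"     -> { fam, HConstants.EMPTY_BYTE_ARRAY } (empty qualifier, new here)
    //   "fam:qual" -> { fam, qual }                        (family and qualifier)
    byte[][] parts = KeyValue.parseColumn(Bytes.toBytes("fam:"));
    System.out.println(parts.length);    // 2 after this patch (previously 1)
    System.out.println(parts[1].length); // 0 -- the explicit empty qualifier
  }
}
```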
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index eda5dd8..021b101 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -77,6 +77,11 @@ public class TestKeyValue extends TestCase {
// Test empty value and empty column -- both should work. (not empty fam)
check(Bytes.toBytes(getName()), Bytes.toBytes(getName()), null, 1, null);
check(HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes(getName()), null, 1, null);
+ // empty qual is equivalent to null qual
+ assertEquals(
+ new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"), null, 1, (byte[]) null),
+ new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"),
+ HConstants.EMPTY_BYTE_ARRAY, 1, (byte[]) null));
}
private void check(final byte [] row, final byte [] family, byte [] qualifier,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index dd6ef51..8dc35df 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -54,7 +54,7 @@ implements Configurable {
public static final String SCAN_ROW_STOP = "hbase.mapreduce.scan.row.stop";
/** Column Family to Scan */
public static final String SCAN_COLUMN_FAMILY = "hbase.mapreduce.scan.column.family";
- /** Space delimited list of columns to scan. */
+ /** Space delimited list of columns and column families to scan. */
public static final String SCAN_COLUMNS = "hbase.mapreduce.scan.columns";
/** The timestamp used to filter columns with a specific timestamp. */
public static final String SCAN_TIMESTAMP = "hbase.mapreduce.scan.timestamp";
@@ -159,18 +159,16 @@ implements Configurable {
/**
* Parses a combined family and qualifier and adds either both or just the
- * family in case there is not qualifier. This assumes the older colon
- * divided notation, e.g. "data:contents" or "meta:".
- *
- * Note: It will through an error when the colon is missing.
+ * family in case there is no qualifier. This assumes the older colon
+ * divided notation, e.g. "family:qualifier".
*
+ * @param scan The Scan to update.
* @param familyAndQualifier family and qualifier
* @return A reference to this instance.
- * @throws IllegalArgumentException When the colon is missing.
*/
private static void addColumn(Scan scan, byte[] familyAndQualifier) {
byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
- if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
+ if (fq.length > 1) {
scan.addColumn(fq[0], fq[1]);
} else {
scan.addFamily(fq[0]);
}
@@ -180,9 +178,12 @@ implements Configurable {
/**
* Adds an array of columns specified using old format, family:qualifier.
* <p>
- * Overrides previous calls to addFamily for any families in the input.
+ * Overrides previous calls to {@link Scan#addColumn(byte[], byte[])} for any families in the
+ * input.
*
- * @param columns array of columns, formatted as <code>family:qualifier</code>
+ * @param scan The Scan to update.
+ * @param columns array of columns, formatted as <code>family:qualifier</code>
+ * @see Scan#addColumn(byte[], byte[])
*/
public static void addColumns(Scan scan, byte [][] columns) {
for (byte[] column : columns) {
@@ -191,13 +192,10 @@ implements Configurable {
}
/**
- * Convenience method to help parse old style (or rather user entry on the
- * command line) column definitions, e.g. "data:contents mime:". The columns
- * must be space delimited and always have a colon (":") to denote family
- * and qualifier.
+ * Convenience method to parse a string representation of an array of column specifiers.
*
+ * @param scan The Scan to update.
* @param columns The columns to parse.
- * @return A reference to this instance.
*/
private static void addColumns(Scan scan, String columns) {
String[] cols = columns.split(" ");
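A usage sketch of what the relaxed column parsing permits in a job configuration; the table, family, and qualifier names below are made up, and only `INPUT_TABLE` and `SCAN_COLUMNS` are assumed from `TableInputFormat`:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

public class ScanColumnsExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, "exampleTable");
    // "info:name" and "info:age" map to scan.addColumn(...), and a bare
    // "events" still maps to scan.addFamily(...). With the new parseColumn
    // contract, "events:" (trailing colon) would instead select the single
    // column with the empty qualifier.
    conf.set(TableInputFormat.SCAN_COLUMNS, "info:name info:age events");
  }
}
```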
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index c90ce6b..486ef92 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -63,8 +63,8 @@ import org.apache.hadoop.net.DNS;
* Bytes.toBytes("exampleTable"));
* // mandatory
* setHTable(exampleTable);
- * Text[] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
- * Bytes.toBytes("columnB") };
+ * byte [][] inputColumns = new byte [][] { Bytes.toBytes("cf1:columnA"),
+ * Bytes.toBytes("cf2") };
* // mandatory
* setInputColumns(inputColumns);
* RowFilterInterface exampleFilter = new RegExpRowFilter("keyPrefix.*");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 2a397ec..854c1db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -217,11 +217,12 @@ public class RowResource extends ResourceBase {
.build();
}
byte [][] parts = KeyValue.parseColumn(col);
- if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
- } else {
- put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
+ if (parts.length == 1) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
+ put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
if (LOG.isDebugEnabled()) {
@@ -285,11 +286,12 @@ public class RowResource extends ResourceBase {
}
Put put = new Put(row);
byte parts[][] = KeyValue.parseColumn(column);
- if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], timestamp, message);
- } else {
- put.add(parts[0], null, timestamp, message);
+ if (parts.length == 1) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
+ put.add(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
if (LOG.isDebugEnabled()) {
@@ -373,13 +375,13 @@ public class RowResource extends ResourceBase {
for (byte[] column: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(column);
if (rowspec.hasTimestamp()) {
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 2) {
delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
} else {
delete.deleteFamily(split[0], rowspec.getTimestamp());
}
} else {
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 2) {
delete.deleteColumns(split[0], split[1]);
} else {
delete.deleteFamily(split[0]);
@@ -441,7 +443,7 @@ public class RowResource extends ResourceBase {
CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
byte[] valueToCheckColumn = valueToCheckCell.getColumn();
byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
- if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
+ if (valueToPutParts.length == 2) {
CellModel valueToPutCell = null;
for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
if(Bytes.equals(cellModels.get(i).getColumn(),
@@ -527,7 +529,7 @@ public class RowResource extends ResourceBase {
}
}
byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
- if (parts.length == 2 && parts[1].length > 0) {
+ if (parts.length == 2) {
delete.deleteColumns(parts[0], parts[1]);
} else {
return Response.status(Response.Status.BAD_REQUEST)
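A behavior sketch of the tightened REST semantics, using only `KeyValue.parseColumn` as shown in the hunks above; the column names are illustrative:

```java
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RestColumnCheckSketch {
  public static void main(String[] args) {
    // "cf:q" and "cf:" parse to two parts and are accepted by the PUT paths;
    // "cf" parses to one part and now draws 400 Bad Request instead of being
    // silently written with a null qualifier.
    System.out.println(KeyValue.parseColumn(Bytes.toBytes("cf:q")).length); // 2
    System.out.println(KeyValue.parseColumn(Bytes.toBytes("cf:")).length);  // 2
    System.out.println(KeyValue.parseColumn(Bytes.toBytes("cf")).length);   // 1 -> 400
    // DELETE semantics shift accordingly: "cf:" used to fall through to
    // deleteFamily(cf); it now targets deleteColumns(cf, <empty qualifier>).
  }
}
```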
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index ed79607..e4f721b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -50,7 +50,7 @@ public class RowResultGenerator extends ResultGenerator {
if (rowspec.hasColumns()) {
for (byte[] col: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(col);
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 2) {
get.addColumn(split[0], split[1]);
} else {
get.addFamily(split[0]);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index ebeae0d..003d299 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -76,10 +76,10 @@ public class ScannerResultGenerator extends ResultGenerator {
byte[][] columns = rowspec.getColumns();
for (byte[] column: columns) {
byte[][] split = KeyValue.parseColumn(column);
- if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
- scan.addColumn(split[0], split[1]);
- } else {
+ if (split.length == 1) {
scan.addFamily(split[0]);
+ } else {
+ scan.addColumn(split[0], split[1]);
}
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 7586693..c70e7fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -28,17 +28,8 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.util.StringUtils;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -48,17 +39,22 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -66,6 +62,10 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
/**
* HTable interface to remote tables accessed via REST gateway
@@ -172,7 +172,7 @@ public class RemoteHTable implements HTableInterface {
for (CellModel cell: row.getCells()) {
byte[][] split = KeyValue.parseColumn(cell.getColumn());
byte[] column = split[0];
- byte[] qualifier = split.length > 1 ? split[1] : null;
+ byte[] qualifier = split.length > 1 ? split[1] : HConstants.EMPTY_BYTE_ARRAY;
kvs.add(new KeyValue(row.getKey(), column, qualifier,
cell.getTimestamp(), cell.getValue()));
}
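A sketch of why substituting `HConstants.EMPTY_BYTE_ARRAY` for `null` is safe here, leaning on the new `TestKeyValue` assertion; the row, family, and timestamp values are illustrative:

```java
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class EmptyQualifierSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("rk");
    byte[] fam = Bytes.toBytes("fam");
    // Null and empty qualifiers produce identical cells, but the non-null
    // form is safer for callers that later read kv.getQualifier().
    KeyValue a = new KeyValue(row, fam, null, 1L, (byte[]) null);
    KeyValue b = new KeyValue(row, fam, HConstants.EMPTY_BYTE_ARRAY, 1L, (byte[]) null);
    System.out.println(a.equals(b)); // true, per the new TestKeyValue assertion
  }
}
```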
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index c22aa83..6048874 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -197,12 +197,10 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
private boolean internalQueueTincrement(TIncrement inc) throws TException {
byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
- if (famAndQf.length < 1) return false;
- byte[] qual = famAndQf.length == 1 ? new byte[0] : famAndQf[1];
+ if (famAndQf.length < 2) return false;
- return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], qual,
+ return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], famAndQf[1],
inc.getAmmount());
-
}
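The caller-facing consequence for Thrift increments, sketched with the generated `TIncrement` type (the all-args constructor and the table/row/column/ammount field order are assumed from HBase's Hbase.thrift; the values are made up):

```java
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.util.Bytes;

public class TincrementSketch {
  public static void main(String[] args) {
    // Accepted: parseColumn("counters:hits") yields two parts.
    TIncrement ok = new TIncrement(
        ByteBuffer.wrap(Bytes.toBytes("t")),
        ByteBuffer.wrap(Bytes.toBytes("row1")),
        ByteBuffer.wrap(Bytes.toBytes("counters:hits")),
        1L);
    // Dropped after this patch: parseColumn("counters") yields one part, so
    // internalQueueTincrement returns false instead of incrementing the
    // empty qualifier as the old code did.
    TIncrement rejected = new TIncrement(
        ByteBuffer.wrap(Bytes.toBytes("t")),
        ByteBuffer.wrap(Bytes.toBytes("row1")),
        ByteBuffer.wrap(Bytes.toBytes("counters")),
        1L);
  }
}
```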
private boolean internalQueueIncrement(byte[] tableName, byte[] rowKey, byte[] fam,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index eecc9e2..e0e899d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -651,8 +651,8 @@ public class ThriftServerRunner implements Runnable {
Map