diff --git src/main/java/org/apache/hadoop/hbase/KeyValue.java src/main/java/org/apache/hadoop/hbase/KeyValue.java index 39d1f09..1c0be3d 100644 --- src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -27,6 +27,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.Map; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.io.HeapSize; @@ -1376,10 +1377,10 @@ public class KeyValue implements Writable, HeapSize { // If no delimiter, return array of size 1 return new byte [][] { c }; } else if(index == c.length - 1) { - // Only a family, return array size 1 + // family with empty qualifier, return array size 2 byte [] family = new byte[c.length-1]; System.arraycopy(c, 0, family, 0, family.length); - return new byte [][] { family }; + return new byte [][] {family, HConstants.EMPTY_BYTE_ARRAY}; } // Family and column, return array size 2 final byte [][] result = new byte [2][]; diff --git src/main/java/org/apache/hadoop/hbase/rest/RowResource.java src/main/java/org/apache/hadoop/hbase/rest/RowResource.java index 5bdede7..b9d5a15 100644 --- src/main/java/org/apache/hadoop/hbase/rest/RowResource.java +++ src/main/java/org/apache/hadoop/hbase/rest/RowResource.java @@ -90,6 +90,7 @@ public class RowResource extends ResourceBase { ResultGenerator generator = ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null); if (!generator.hasNext()) { + servlet.getMetrics().incrementFailedGetRequests(1); return Response.status(Response.Status.NOT_FOUND) .type(MIMETYPE_TEXT).entity("Not found" + CRLF) .build(); } @@ -116,7 +117,7 @@ public class RowResource extends ResourceBase { servlet.getMetrics().incrementSucessfulGetRequests(1); return Response.ok(model).build(); } catch (RuntimeException e) { - servlet.getMetrics().incrementFailedPutRequests(1); + 
servlet.getMetrics().incrementFailedGetRequests(1); if (e.getCause() instanceof TableNotFoundException) { return Response.status(Response.Status.NOT_FOUND) .type(MIMETYPE_TEXT).entity("Not found" + CRLF) @@ -126,7 +127,7 @@ public class RowResource extends ResourceBase { .type(MIMETYPE_TEXT).entity("Bad request" + CRLF) .build(); } catch (Exception e) { - servlet.getMetrics().incrementFailedPutRequests(1); + servlet.getMetrics().incrementFailedGetRequests(1); return Response.status(Response.Status.SERVICE_UNAVAILABLE) .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF) .build(); @@ -143,14 +144,15 @@ public class RowResource extends ResourceBase { // doesn't make sense to use a non specific coordinate as this can only // return a single cell if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) { - return Response.status(Response.Status.BAD_REQUEST) - .type(MIMETYPE_TEXT).entity("Bad request" + CRLF) - .build(); + servlet.getMetrics().incrementFailedGetRequests(1); + return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT) + .entity("Bad request: Either 0 or more than 1 columns specified." 
+ CRLF).build(); } try { ResultGenerator generator = ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null); if (!generator.hasNext()) { + servlet.getMetrics().incrementFailedGetRequests(1); return Response.status(Response.Status.NOT_FOUND) .type(MIMETYPE_TEXT).entity("Not found" + CRLF) .build(); diff --git src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java index 8d6cfb4..f7de85f 100644 --- src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java +++ src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java @@ -29,12 +29,14 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.HTablePool; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.util.Bytes; public class RowResultGenerator extends ResultGenerator { private static final Log LOG = LogFactory.getLog(RowResultGenerator.class); @@ -51,8 +53,12 @@ public class RowResultGenerator extends ResultGenerator { if (rowspec.hasColumns()) { for (byte[] col: rowspec.getColumns()) { byte[][] split = KeyValue.parseColumn(col); - if (split.length == 2 && split[1].length != 0) { - get.addColumn(split[0], split[1]); + if (split.length == 2) { + if (split[1].length != 0) { + get.addColumn(split[0], split[1]); + } else { + get.addColumn(split[0], HConstants.EMPTY_BYTE_ARRAY); + } } else { get.addFamily(split[0]); } diff --git src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java index 6cff851..6b5e016 100644 --- 
src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java +++ src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java @@ -20,10 +20,14 @@ package org.apache.hadoop.hbase.rest; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringWriter; import java.net.URLEncoder; +import java.util.List; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; @@ -32,7 +36,11 @@ import javax.xml.bind.Unmarshaller; import org.apache.commons.httpclient.Header; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Cluster; @@ -41,9 +49,6 @@ import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; - -import static org.junit.Assert.*; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -56,6 +61,7 @@ public class TestRowResource { private static final String CFB = "b"; private static final String COLUMN_1 = CFA + ":1"; private static final String COLUMN_2 = CFB + ":2"; + private static final String COLUMN_3 = CFA + ":"; private static final String ROW_1 = "testrow1"; private static final String VALUE_1 = "testvalue1"; private static final String ROW_2 = "testrow2"; @@ -684,6 +690,59 @@ public class TestRowResource { assertEquals(response.getCode(), 400); } + @Test + public void testMultiColumnGetXML() throws Exception { + String path = "/" + TABLE + 
"/fakerow"; + + CellSetModel cellSetModel = new CellSetModel(); + RowModel rowModel = new RowModel(ROW_1); + rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_1), + Bytes.toBytes(VALUE_1))); + rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_2), + Bytes.toBytes(VALUE_2))); + rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_3), + Bytes.toBytes(VALUE_2))); + cellSetModel.addRow(rowModel); + StringWriter writer = new StringWriter(); + marshaller.marshal(cellSetModel, writer); + + Response response = client.put(path, Constants.MIMETYPE_XML, + Bytes.toBytes(writer.toString())); + Thread.yield(); + + // make sure the fake row was not actually created + response = client.get(path, Constants.MIMETYPE_XML); + assertEquals(response.getCode(), 404); + + //Try getting all the column values at once. + path = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1 + "," + COLUMN_2 + "," + COLUMN_3; + response = client.get(path, Constants.MIMETYPE_XML); + assertEquals(200, response.getCode()); + CellSetModel cellSet = (CellSetModel) + unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); + assertTrue(cellSet.getRows().size() == 1); + assertTrue(cellSet.getRows().get(0).getCells().size() == 3); + List cells = cellSet.getRows().get(0).getCells(); + + assertTrue(containsCellModel(cells, COLUMN_1, VALUE_1)); + assertTrue(containsCellModel(cells, COLUMN_2, VALUE_2)); + assertTrue(containsCellModel(cells, COLUMN_3, VALUE_2)); + response = deleteRow(TABLE, ROW_1); + assertEquals(response.getCode(), 200); + } + + private boolean containsCellModel(List cells, String column, String value) { + boolean contains = false; + for (CellModel cell : cells) { + if (Bytes.toString(cell.getColumn()).equals(column) + && Bytes.toString(cell.getValue()).equals(value)) { + contains = true; + return contains; + } + } + return contains; + } + @org.junit.Rule public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();