diff --git src/main/java/org/apache/hadoop/hbase/KeyValue.java src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 39d1f09..68bd10d 100644
--- src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -27,6 +27,7 @@
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -1376,10 +1377,10 @@ public class KeyValue implements Writable, HeapSize {
       // If no delimiter, return array of size 1
       return new byte [][] { c };
     } else if(index == c.length - 1) {
-      // Only a family, return array size 1
+      // family with empty qualifier, return array size 1
       byte [] family = new byte[c.length-1];
       System.arraycopy(c, 0, family, 0, family.length);
-      return new byte [][] { family };
+      return new byte [][] {family, Bytes.toBytes(StringUtils.EMPTY)};
     }
     // Family and column, return array size 2
     final byte [][] result = new byte [2][];
diff --git src/main/java/org/apache/hadoop/hbase/rest/RowResource.java src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 5bdede7..b9d5a15 100644
--- src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -90,6 +90,7 @@ public class RowResource extends ResourceBase {
       ResultGenerator generator = ResultGenerator.fromRowSpec(tableResource.getName(),
         rowspec, null);
       if (!generator.hasNext()) {
+        servlet.getMetrics().incrementFailedGetRequests(1);
         return Response.status(Response.Status.NOT_FOUND)
           .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
           .build();
@@ -116,7 +117,7 @@ servlet.getMetrics().incrementSucessfulGetRequests(1);
       return Response.ok(model).build();
     } catch (RuntimeException e) {
-      servlet.getMetrics().incrementFailedPutRequests(1);
+      servlet.getMetrics().incrementFailedGetRequests(1);
       if (e.getCause() instanceof TableNotFoundException) {
         return Response.status(Response.Status.NOT_FOUND)
           .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
           .build();
@@ -126,7 +127,7 @@
         .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
         .build();
     } catch (Exception e) {
-      servlet.getMetrics().incrementFailedPutRequests(1);
+      servlet.getMetrics().incrementFailedGetRequests(1);
       return Response.status(Response.Status.SERVICE_UNAVAILABLE)
         .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
         .build();
@@ -143,14 +144,15 @@
     // doesn't make sense to use a non specific coordinate as this can only
     // return a single cell
     if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
-      return Response.status(Response.Status.BAD_REQUEST)
-        .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
-        .build();
+      servlet.getMetrics().incrementFailedGetRequests(1);
+      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
+        .entity("Bad request: Either 0 or more than 1 columns specified." + CRLF).build();
     }
     try {
       ResultGenerator generator = ResultGenerator.fromRowSpec(tableResource.getName(),
         rowspec, null);
       if (!generator.hasNext()) {
+        servlet.getMetrics().incrementFailedGetRequests(1);
         return Response.status(Response.Status.NOT_FOUND)
           .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
           .build();
diff --git src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index 8d6cfb4..ea8802c 100644
--- src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.util.Bytes;
 
 public class RowResultGenerator extends ResultGenerator {
   private static final Log LOG = LogFactory.getLog(RowResultGenerator.class);
@@ -51,8 +52,12 @@ public class RowResultGenerator extends ResultGenerator {
     if (rowspec.hasColumns()) {
       for (byte[] col: rowspec.getColumns()) {
         byte[][] split = KeyValue.parseColumn(col);
-        if (split.length == 2 && split[1].length != 0) {
-          get.addColumn(split[0], split[1]);
+        if (split.length == 2) {
+          if (split[1].length != 0) {
+            get.addColumn(split[0], split[1]);
+          } else {
+            get.addColumn(split[0], Bytes.toBytes(""));
+          }
         } else {
           get.addFamily(split[0]);
         }
diff --git src/main/java/org/apache/hadoop/hbase/rest/provider/JacksonProvider.java src/main/java/org/apache/hadoop/hbase/rest/provider/JacksonProvider.java
new file mode 100644
index 0000000..7791d02
--- /dev/null
+++ src/main/java/org/apache/hadoop/hbase/rest/provider/JacksonProvider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.provider;
+
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+
+import javax.ws.rs.ext.Provider;
+
+//create a class in the defined resource package name
+//so it gets activated
+//Use jackson to take care of json
+//since it has better support for object
+//deserializaiton and less clunky to deal with
+@Provider
+public class JacksonProvider extends JacksonJaxbJsonProvider {
+}
diff --git src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
index 6cff851..d09596a 100644
--- src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
+++ src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
@@ -24,7 +24,9 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.net.URLEncoder;
+import java.util.List;
 
+import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
@@ -40,7 +42,9 @@
 import org.apache.hadoop.hbase.rest.client.Response;
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
 import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.provider.JacksonProvider;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.codehaus.jackson.map.ObjectMapper;
 
 import static org.junit.Assert.*;
@@ -56,6 +60,7 @@
   private static final String CFB = "b";
   private static final String COLUMN_1 = CFA + ":1";
   private static final String COLUMN_2 = CFB + ":2";
+  private static final String COLUMN_3 = CFA + ":";
   private static final String ROW_1 = "testrow1";
   private static final String VALUE_1 = "testvalue1";
   private static final String ROW_2 = "testrow2";
@@ -64,6 +69,7 @@
   private static final String VALUE_3 = "testvalue3";
   private static final String ROW_4 = "testrow4";
   private static final String VALUE_4 = "testvalue4";
+  protected static ObjectMapper jsonMapper;
 
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final HBaseRESTTestingUtility REST_TEST_UTIL =
@@ -85,6 +91,8 @@
       RowModel.class);
     marshaller = context.createMarshaller();
     unmarshaller = context.createUnmarshaller();
+    jsonMapper = new JacksonProvider()
+      .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
     client = new Client(new Cluster().add("localhost",
       REST_TEST_UTIL.getServletPort()));
     HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
@@ -683,6 +691,57 @@
       Bytes.toBytes(writer.toString()));
     assertEquals(response.getCode(), 400);
   }
+
+  @Test
+  public void testMultiColumnGetJSON() throws Exception {
+    String path = "/" + TABLE + "/fakerow";
+
+    CellSetModel cellSetModel = new CellSetModel();
+    RowModel rowModel = new RowModel(ROW_1);
+    rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_1),
+      Bytes.toBytes(VALUE_1)));
+    rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_2),
+      Bytes.toBytes(VALUE_2)));
+    rowModel.addCell(new CellModel(Bytes.toBytes(COLUMN_3),
+      Bytes.toBytes(VALUE_2)));
+    cellSetModel.addRow(rowModel);
+    String jsonString = jsonMapper.writeValueAsString(cellSetModel);
+
+    Response response = client.put(path, Constants.MIMETYPE_JSON,
+      Bytes.toBytes(jsonString));
+    Thread.yield();
+
+    // make sure the fake row was not actually created
+    response = client.get(path, Constants.MIMETYPE_JSON);
+    assertEquals(response.getCode(), 404);
+
+    //Try getting all the column values at once.
+    path = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1 + "," + COLUMN_2 + "," + COLUMN_3;
+    response = client.get(path, Constants.MIMETYPE_JSON);
+    assertEquals(200, response.getCode());
+    CellSetModel cellSet = jsonMapper.readValue(response.getBody(), CellSetModel.class);
+    assertTrue(cellSet.getRows().size() == 1);
+    assertTrue(cellSet.getRows().get(0).getCells().size() == 3);
+    List cells = cellSet.getRows().get(0).getCells();
+
+    assertTrue(containsCellModel(cells, COLUMN_1, VALUE_1));
+    assertTrue(containsCellModel(cells, COLUMN_2, VALUE_2));
+    assertTrue(containsCellModel(cells, COLUMN_3, VALUE_2));
+    response = deleteRow(TABLE, ROW_1);
+    assertEquals(response.getCode(), 200);
+  }
+
+  private boolean containsCellModel(List cells, String column, String value) {
+    boolean contains = false;
+    for (CellModel cell : cells) {
+      if (Bytes.toString(cell.getColumn()).equals(column)
+          && Bytes.toString(cell.getValue()).equals(value)) {
+        contains = true;
+        return contains;
+      }
+    }
+    return contains;
+  }
 
   @org.junit.Rule
   public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =