Index: src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java (revision 1230294)
+++ src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java (working copy)
@@ -204,6 +204,70 @@
     return response;
   }
 
+  private static Response checkAndPutValueXML(String url, String table,
+      String row, String column, String valueToCheck, String valueToPut)
+      throws IOException, JAXBException {
+    RowModel rowModel = new RowModel(row);
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(valueToPut)));
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(valueToCheck)));
+    CellSetModel cellSetModel = new CellSetModel();
+    cellSetModel.addRow(rowModel);
+    StringWriter writer = new StringWriter();
+    marshaller.marshal(cellSetModel, writer);
+    Response response = client.put(url, Constants.MIMETYPE_XML,
+      Bytes.toBytes(writer.toString()));
+    Thread.yield();
+    return response;
+  }
+
+  private static Response checkAndPutValueXML(String table, String row,
+      String column, String valueToCheck, String valueToPut)
+      throws IOException, JAXBException {
+    StringBuilder path = new StringBuilder();
+    path.append('/');
+    path.append("checkandput");
+    path.append('/');
+    path.append(table);
+    path.append('/');
+    path.append(row);
+    path.append('/');
+    path.append(column);
+    return checkAndPutValueXML(path.toString(), table, row, column,
+      valueToCheck, valueToPut);
+  }
+
+  private static Response checkAndDeleteXML(String url, String table,
+      String row, String column, String valueToCheck) throws IOException,
+      JAXBException {
+    RowModel rowModel = new RowModel(row);
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(valueToCheck)));
+    CellSetModel cellSetModel = new CellSetModel();
+    cellSetModel.addRow(rowModel);
+    StringWriter writer = new StringWriter();
+    marshaller.marshal(cellSetModel, writer);
+    Response response = client.put(url, Constants.MIMETYPE_XML,
+      Bytes.toBytes(writer.toString()));
+    Thread.yield();
+    return response;
+  }
+
+  private static Response checkAndDeleteXML(String table, String row,
+      String column, String valueToCheck) throws IOException, JAXBException {
+    StringBuilder path = new StringBuilder();
+    path.append('/');
+    path.append("checkanddelete");
+    path.append('/');
+    path.append(table);
+    path.append('/');
+    path.append(row);
+    path.append('/');
+    path.append(column);
+    return checkAndDeleteXML(path.toString(), table, row, column, valueToCheck);
+  }
+
   private static void checkValueXML(String table, String row, String column,
       String value) throws IOException, JAXBException {
     Response response = getValueXML(table, row, column);
@@ -253,6 +317,51 @@
     return response;
   }
 
+  private static Response checkAndPutValuePB(String url, String table,
+      String row, String column, String valueToCheck, String valueToPut)
+      throws IOException {
+    RowModel rowModel = new RowModel(row);
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(valueToPut)));
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(valueToCheck)));
+    CellSetModel cellSetModel = new CellSetModel();
+    cellSetModel.addRow(rowModel);
+    Response response = client.put(url, Constants.MIMETYPE_PROTOBUF,
+      cellSetModel.createProtobufOutput());
+    Thread.yield();
+    return response;
+  }
+
+  private static Response checkAndPutValuePB(String table, String row,
+      String column, String valueToCheck, String valueToPut) throws IOException {
+    StringBuilder path = new StringBuilder();
+    path.append('/');
+    path.append("checkandput");
+    path.append('/');
+    path.append(table);
+    path.append('/');
+    path.append(row);
+    path.append('/');
+    path.append(column);
+    return checkAndPutValuePB(path.toString(), table, row, column,
+      valueToCheck, valueToPut);
+  }
+
+  private static Response checkAndDeletePB(String table, String row,
+      String column, String value) throws IOException {
+    RowModel rowModel = new RowModel(row);
+    rowModel.addCell(new CellModel(Bytes.toBytes(column),
+      Bytes.toBytes(value)));
+    CellSetModel cellSetModel = new CellSetModel();
+    cellSetModel.addRow(rowModel);
+    Response response = client.put("/checkanddelete/" + table + '/' + row
+      + '/' + column, Constants.MIMETYPE_PROTOBUF,
+      cellSetModel.createProtobufOutput());
+    Thread.yield();
+    return response;
+  }
+
   private static void checkValuePB(String table, String row, String column,
       String value) throws IOException {
     Response response = getValuePB(table, row, column);
@@ -282,6 +391,13 @@
     assertEquals(response.getCode(), 404);
     checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2);
 
+    response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1);
+    assertEquals(response.getCode(), 200);
+    response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_1);
+    assertEquals(response.getCode(), 200);
+    response = getValueXML(TABLE, ROW_1, COLUMN_1);
+    assertEquals(response.getCode(), 404);
+
     response = deleteRow(TABLE, ROW_1);
     assertEquals(response.getCode(), 200);
     response = getValueXML(TABLE, ROW_1, COLUMN_1);
@@ -300,8 +416,14 @@
     assertEquals(response.getCode(), 403);
     response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1);
     assertEquals(response.getCode(), 403);
+    response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_2);
+    assertEquals(response.getCode(), 403);
+    response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_2);
+    assertEquals(response.getCode(), 403);
     response = deleteValue(TABLE, ROW_1, COLUMN_1);
     assertEquals(response.getCode(), 403);
+    response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_1);
+    assertEquals(response.getCode(), 403);
     response = deleteRow(TABLE, ROW_1);
     assertEquals(response.getCode(), 403);
 
@@ -311,6 +433,10 @@
     assertEquals(response.getCode(), 200);
     response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1);
     assertEquals(response.getCode(), 200);
+    response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_2);
+    assertEquals(response.getCode(), 200);
+    response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3);
+    assertEquals(response.getCode(), 200);
     response = deleteValue(TABLE, ROW_1, COLUMN_1);
     assertEquals(response.getCode(), 200);
     response = deleteRow(TABLE, ROW_1);
@@ -328,7 +454,13 @@
     response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2);
     assertEquals(response.getCode(), 200);
     checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2);
+    response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3);
+    assertEquals(response.getCode(), 200);
+    checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3);
+    response = checkAndDeleteXML(TABLE, ROW_1, COLUMN_1, VALUE_3);
+    assertEquals(response.getCode(), 200);
+
     response = deleteRow(TABLE, ROW_1);
     assertEquals(response.getCode(), 200);
   }
 
@@ -349,6 +481,13 @@
     assertEquals(response.getCode(), 200);
     checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2);
 
+    response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3);
+    assertEquals(response.getCode(), 200);
+    checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_3);
+    response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3, VALUE_4);
+    assertEquals(response.getCode(), 200);
+    checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_4);
+
     response = deleteRow(TABLE, ROW_1);
     assertEquals(response.getCode(), 200);
   }
Index: src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java (revision 1230294)
+++ src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java (working copy)
@@ -264,6 +264,24 @@
     assertTrue(Bytes.equals(VALUE_2, value));
   }
 
+  @Test
+  public void testCheckAndPut() throws IOException {
+    Put put = new Put(ROW_3);
+    put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+    remoteTable.put(put);
+
+    Put checkAndPut = new Put(ROW_3);
+    checkAndPut.add(COLUMN_1, QUALIFIER_1, VALUE_2);
+    remoteTable.checkAndPut(ROW_3, COLUMN_1, QUALIFIER_1, VALUE_1, checkAndPut);
+
+    Get get = new Get(ROW_3);
+    get.addFamily(COLUMN_1);
+    Result result = remoteTable.get(get);
+    byte[] value = result.getValue(COLUMN_1, QUALIFIER_1);
+    assertNotNull(value);
+    assertTrue(Bytes.equals(VALUE_2, value));
+  }
+
   public void testDelete() throws IOException {
     Put put = new Put(ROW_3);
     put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
@@ -308,6 +326,52 @@
     assertNull(value2);
   }
 
+  @Test
+  public void testCheckAndDelete() throws IOException {
+    Put put = new Put(ROW_3);
+    put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+    put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+    remoteTable.put(put);
+
+    Get get = new Get(ROW_3);
+    get.addFamily(COLUMN_1);
+    get.addFamily(COLUMN_2);
+    Result result = remoteTable.get(get);
+    byte[] value1 = result.getValue(COLUMN_1, QUALIFIER_1);
+    byte[] value2 = result.getValue(COLUMN_2, QUALIFIER_2);
+    assertNotNull(value1);
+    assertTrue(Bytes.equals(VALUE_1, value1));
+    assertNotNull(value2);
+    assertTrue(Bytes.equals(VALUE_2, value2));
+
+    Delete delete = new Delete(ROW_3);
+    delete.deleteColumn(COLUMN_2, QUALIFIER_2);
+    remoteTable.checkAndDelete(ROW_3, COLUMN_2, QUALIFIER_2, VALUE_2, delete);
+
+    get = new Get(ROW_3);
+    get.addFamily(COLUMN_1);
+    get.addFamily(COLUMN_2);
+    result = remoteTable.get(get);
+    value1 = result.getValue(COLUMN_1, QUALIFIER_1);
+    value2 = result.getValue(COLUMN_2, QUALIFIER_2);
+    assertNotNull(value1);
+    assertTrue(Bytes.equals(VALUE_1, value1));
+    assertNull(value2);
+
+    delete = new Delete(ROW_3);
+    delete.deleteColumn(COLUMN_1, QUALIFIER_1);
+    remoteTable.checkAndDelete(ROW_3, COLUMN_1, QUALIFIER_1, VALUE_1, delete);
+
+    get = new Get(ROW_3);
+    get.addFamily(COLUMN_1);
+    get.addFamily(COLUMN_2);
+    result = remoteTable.get(get);
+    value1 = result.getValue(COLUMN_1, QUALIFIER_1);
+    value2 = result.getValue(COLUMN_2, QUALIFIER_2);
+    assertNull(value1);
+    assertNull(value2);
+  }
+
   public void testScanner() throws IOException {
     List<Put> puts = new ArrayList<Put>();
     Put put = new Put(ROW_1);
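For reviewers who want to exercise the patch end to end, the client-side round trip looks roughly like the sketch below; the host, port, table, family, and class names in it are placeholders, not part of the patch. A false return corresponds to the 304 path added to RemoteHTable further down.

```java
// Round-trip sketch for the new client API. Assumes a REST gateway is
// listening on localhost:8080 and that table "t1" with family "f1" exists.
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutExample {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster();
    cluster.add("localhost", 8080); // the REST gateway, not a region server
    Client client = new Client(cluster);
    RemoteHTable table = new RemoteHTable(client, "t1");

    byte[] row = Bytes.toBytes("r1");
    byte[] family = Bytes.toBytes("f1");
    byte[] qualifier = Bytes.toBytes("q1");

    Put put = new Put(row);
    put.add(family, qualifier, Bytes.toBytes("new-value"));
    // True only if the cell currently holds "old-value" (HTTP 200);
    // a 304 from the gateway surfaces here as false.
    boolean swapped = table.checkAndPut(row, family, qualifier,
      Bytes.toBytes("old-value"), put);
    System.out.println("checkAndPut applied: " + swapped);
    client.shutdown();
  }
}
```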
Index: src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/TableResource.java (revision 1230294)
+++ src/main/java/org/apache/hadoop/hbase/rest/TableResource.java (working copy)
@@ -21,207 +21,25 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentSkipListMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import javax.ws.rs.Encoded;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.QueryParam;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.rest.transform.NullTransform;
-import org.apache.hadoop.hbase.rest.transform.Transform;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.util.StringUtils;
 
 public class TableResource extends ResourceBase {
-  private static final Log LOG = LogFactory.getLog(TableResource.class);
 
   /**
-   * HCD attributes starting with this string are considered transform
-   * directives
-   */
-  private static final String DIRECTIVE_KEY = "Transform$";
-
-  /**
-   * Transform directives are of the form <qualifier>:<class>
-   * where qualifier is a string for exact matching or '*' as a wildcard
-   * that will match anything; and class is either the fully qualified
-   * class name of a transform implementation or can be the short name of a
-   * transform in the org.apache.hadoop.hbase.rest.transform package.
-   */
-  private static final Pattern DIRECTIVE_PATTERN =
-    Pattern.compile("([^\\:]+)\\:([^\\,]+)\\,?");
-  private static final Transform defaultTransform = new NullTransform();
-  private static final
-    Map<String,Map<byte[],Map<byte[],Transform>>> transformMap =
-      new ConcurrentHashMap<String,Map<byte[],Map<byte[],Transform>>>();
-  private static final Map<String,Long> lastCheckedMap =
-    new ConcurrentHashMap<String,Long>();
-
-  /**
-   * @param table the table
-   * @param family the column family
-   * @param qualifier the column qualifier, or null
-   * @return the transformation specified for the given family or qualifier, if
-   * any, otherwise the default
-   */
-  static Transform getTransform(String table, byte[] family, byte[] qualifier) {
-    if (qualifier == null) {
-      qualifier = HConstants.EMPTY_BYTE_ARRAY;
-    }
-    Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
-    if (familyMap != null) {
-      Map<byte[],Transform> columnMap = familyMap.get(family);
-      if (columnMap != null) {
-        Transform t = columnMap.get(qualifier);
-        // check as necessary if there is a wildcard entry
-        if (t == null) {
-          t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
-        }
-        // if we found something, return it, otherwise we will return the
-        // default by falling through
-        if (t != null) {
-          return t;
-        }
-      }
-    }
-    return defaultTransform;
-  }
-
-  synchronized static void setTransform(String table, byte[] family,
-      byte[] qualifier, Transform transform) {
-    Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
-    if (familyMap == null) {
-      familyMap = new ConcurrentSkipListMap<byte[],Map<byte[],Transform>>(
-        Bytes.BYTES_COMPARATOR);
-      transformMap.put(table, familyMap);
-    }
-    Map<byte[],Transform> columnMap = familyMap.get(family);
-    if (columnMap == null) {
-      columnMap = new ConcurrentSkipListMap<byte[],Transform>(
-        Bytes.BYTES_COMPARATOR);
-      familyMap.put(family, columnMap);
-    }
-    // if transform is null, remove any existing entry
-    if (transform != null) {
-      columnMap.put(qualifier, transform);
-    } else {
-      columnMap.remove(qualifier);
-    }
-  }
-
-  String table;
-
-  /**
-   * Scan the table schema for transform directives. These are column family
-   * attributes containing a comma-separated list of elements of the form
-   * <qualifier>:<transform-class>, where qualifier
-   * can be a string for exact matching or '*' as a wildcard to match anything.
-   * The attribute key must begin with the string "Transform$".
-   */
-  void scanTransformAttrs() throws IOException {
-    try {
-      HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
-      HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes(table));
-      for (HColumnDescriptor hcd: htd.getFamilies()) {
-        for (Map.Entry<ImmutableBytesWritable,ImmutableBytesWritable> e:
-            hcd.getValues().entrySet()) {
-          // does the key start with the transform directive tag?
-          String key = Bytes.toString(e.getKey().get());
-          if (!key.startsWith(DIRECTIVE_KEY)) {
-            // no, skip
-            continue;
-          }
-          // match a comma separated list of one or more directives
-          byte[] value = e.getValue().get();
-          Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
-          while (m.find()) {
-            byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
-            String s = m.group(1);
-            if (s.length() > 0 && !s.equals("*")) {
-              qualifier = Bytes.toBytes(s);
-            }
-            boolean retry = false;
-            String className = m.group(2);
-            while (true) {
-              try {
-                // if a transform was previously configured for the qualifier,
-                // this will simply replace it
-                setTransform(table, hcd.getName(), qualifier,
-                  (Transform)Class.forName(className).newInstance());
-                break;
-              } catch (InstantiationException ex) {
-                LOG.error(StringUtils.stringifyException(ex));
-                if (retry) {
-                  break;
-                }
-                retry = true;
-              } catch (IllegalAccessException ex) {
-                LOG.error(StringUtils.stringifyException(ex));
-                if (retry) {
-                  break;
-                }
-                retry = true;
-              } catch (ClassNotFoundException ex) {
-                if (retry) {
-                  LOG.error(StringUtils.stringifyException(ex));
-                  break;
-                }
-                className = "org.apache.hadoop.hbase.rest.transform." + className;
-                retry = true;
-              }
-            }
-          }
-        }
-      }
-    } catch (TableNotFoundException e) {
-      // ignore
-    }
-  }
-
-  /**
    * Constructor
    * @param table
    * @throws IOException
    */
   public TableResource(String table) throws IOException {
-    super();
-    this.table = table;
-    // Scanning the table schema is too expensive to do for every operation.
-    // Do it once per minute by default.
-    // Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
-    long now = System.currentTimeMillis();
-    Long lastChecked = lastCheckedMap.get(table);
-    if (lastChecked != null) {
-      long interval = servlet.getConfiguration()
-        .getLong("hbase.rest.transform.check.interval", 60000);
-      if (interval > 0 && (now - lastChecked.longValue()) > interval) {
-        scanTransformAttrs();
-        lastCheckedMap.put(table, now);
-      }
-    } else {
-      scanTransformAttrs();
-      lastCheckedMap.put(table, now);
-    }
+    super(table);
   }
 
-  /** @return the table name */
-  String getName() {
-    return table;
-  }
-
   /**
    * @return true if the table exists
    * @throws IOException
@@ -231,24 +49,6 @@
     return admin.tableExists(table);
   }
 
-  /**
-   * Apply any configured transformations to the value
-   * @param family
-   * @param qualifier
-   * @param value
-   * @param direction
-   * @return
-   * @throws IOException
-   */
-  byte[] transform(byte[] family, byte[] qualifier, byte[] value,
-      Transform.Direction direction) throws IOException {
-    Transform t = getTransform(table, family, qualifier);
-    if (t != null) {
-      return t.transform(value, direction);
-    }
-    return value;
-  }
-
   @Path("exists")
   public ExistsResource getExistsResource() throws IOException {
     return new ExistsResource(this);
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java (revision 0)
@@ -0,0 +1,50 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Path;
+import javax.ws.rs.Encoded;
+
+public class CheckAndPutTableResource extends ResourceBase {
+
+  /**
+   * Constructor
+   *
+   * @param table
+   * @throws IOException
+   */
+  public CheckAndPutTableResource(String table) throws IOException {
+    super(table);
+  }
+
+  @Path("{rowspec: .+}")
+  public CheckAndPutRowResource getCheckAndPutRowResource(
+      // We need the @Encoded decorator so Jersey won't urldecode before
+      // the RowSpec constructor has a chance to parse
+      final @PathParam("rowspec") @Encoded String rowspec,
+      final @QueryParam("v") String versions) throws IOException {
+    return new CheckAndPutRowResource(this, rowspec, versions);
+  }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java (revision 0)
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+
+public class CheckAndPutRowResource extends ResourceBase {
+  private static final Log LOG = LogFactory
+      .getLog(CheckAndPutRowResource.class);
+
+  CheckAndPutTableResource tableResource;
+  RowSpec rowspec;
+
+  /**
+   * Constructor
+   *
+   * @param tableResource
+   * @param rowspec
+   * @param versions
+   * @throws IOException
+   */
+  public CheckAndPutRowResource(CheckAndPutTableResource tableResource,
+      String rowspec, String versions) throws IOException {
+    super();
+    this.tableResource = tableResource;
+    this.rowspec = new RowSpec(rowspec);
+    if (versions != null) {
+      this.rowspec.setMaxVersions(Integer.valueOf(versions));
+    }
+  }
+
+  /**
+   * Validates the input request parameters, parses columns from the
+   * CellSetModel, and invokes checkAndPut on HTable.
+   *
+   * @param model instance of CellSetModel
+   * @return Response 200 for OK and 304 for not-modified
+   */
+  Response update(final CellSetModel model) {
+    servlet.getMetrics().incrementRequests(1);
+    if (servlet.isReadOnly()) {
+      throw new WebApplicationException(Response.Status.FORBIDDEN);
+    }
+    HTablePool pool = servlet.getTablePool();
+    HTableInterface table = null;
+    try {
+      if (model.getRows().size() != 1) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+
+      RowModel rowModel = model.getRows().get(0);
+      byte[] key = rowModel.getKey();
+      if (key == null) {
+        key = rowspec.getRow();
+      }
+      if (key == null) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+
+      if (rowModel.getCells().size() != 2) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+
+      Put put = new Put(key);
+
+      CellModel valueToPutCell = rowModel.getCells().get(0);
+      byte[] valueToPutColumn = valueToPutCell.getColumn();
+      if (valueToPutColumn == null) {
+        try {
+          valueToPutColumn = rowspec.getColumns()[0];
+        } catch (final ArrayIndexOutOfBoundsException e) {
+          throw new WebApplicationException(Response.Status.BAD_REQUEST);
+        }
+      }
+
+      byte[][] valueToPutParts = KeyValue.parseColumn(valueToPutColumn);
+      if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
+        put.add(valueToPutParts[0], valueToPutParts[1], valueToPutCell
+          .getTimestamp(), tableResource.transform(valueToPutParts[0],
+          valueToPutParts[1], valueToPutCell.getValue(),
+          Transform.Direction.IN));
+      } else {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+
+      CellModel valueToCheckCell = rowModel.getCells().get(1);
+      byte[] valueToCheckColumn = valueToCheckCell.getColumn();
+      if (valueToCheckColumn == null) {
+        try {
+          valueToCheckColumn = rowspec.getColumns()[1];
+        } catch (final ArrayIndexOutOfBoundsException e) {
+          throw new WebApplicationException(Response.Status.BAD_REQUEST);
+        }
+      }
+
+      table = pool.getTable(tableResource.getName());
+      boolean retValue = table.checkAndPut(key, valueToPutParts[0],
+        valueToPutParts[1], valueToCheckCell.getValue(), put);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+      }
+      table.flushCommits();
+      ResponseBuilder response = Response.ok();
+      if (!retValue) {
+        response = Response.status(304);
+      }
+      return response.build();
+    } catch (final IOException e) {
+      throw new WebApplicationException(e, Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        try {
+          pool.putTable(table);
+        } catch (IOException ioe) {
+          throw new WebApplicationException(ioe,
+            Response.Status.SERVICE_UNAVAILABLE);
+        }
+      }
+    }
+  }
+
+  @PUT
+  @Consumes({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF })
+  public Response put(final CellSetModel model, final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    return update(model);
+  }
+
+  @POST
+  @Consumes({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF })
+  public Response post(final CellSetModel model, final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath());
+    }
+    return update(model);
+  }
+}
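The update() method above fixes the wire contract for /checkandput: exactly one row carrying exactly two cells, where cell 0 is the new value and cell 1 is the expected current value. A minimal sketch of building that payload with the model classes the patch already uses (table, row, and column names are placeholders):

```java
// Builds the two-cell payload that CheckAndPutRowResource.update() expects.
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutPayload {
  static CellSetModel payload() {
    RowModel row = new RowModel("r1");
    // cell 0: the new value to put
    row.addCell(new CellModel(Bytes.toBytes("f1:q1"), Bytes.toBytes("new")));
    // cell 1: the value the column must currently hold
    row.addCell(new CellModel(Bytes.toBytes("f1:q1"), Bytes.toBytes("old")));
    CellSetModel cellSet = new CellSetModel();
    cellSet.addRow(row);
    return cellSet; // PUT or POST this to /checkandput/<table>/<row>/<column>
  }
}
```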
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java (revision 0)
@@ -0,0 +1,50 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Path;
+import javax.ws.rs.Encoded;
+
+public class CheckAndDeleteTableResource extends ResourceBase {
+
+  /**
+   * Constructor
+   *
+   * @param table
+   * @throws IOException
+   */
+  public CheckAndDeleteTableResource(String table) throws IOException {
+    super(table);
+  }
+
+  @Path("{rowspec: .+}")
+  public CheckAndDeleteRowResource getCheckAndDeleteRowResource(
+      // We need the @Encoded decorator so Jersey won't urldecode before
+      // the RowSpec constructor has a chance to parse
+      final @PathParam("rowspec") @Encoded String rowspec,
+      final @QueryParam("v") String versions) throws IOException {
+    return new CheckAndDeleteRowResource(this, rowspec, versions);
+  }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java (revision 1230294)
+++ src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java (working copy)
@@ -21,12 +21,224 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.rest.transform.NullTransform;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 public class ResourceBase implements Constants {
+  private static final Log LOG = LogFactory.getLog(ResourceBase.class);
 
   RESTServlet servlet;
+  protected String table;
 
+  /**
+   * HCD attributes starting with this string are considered transform
+   * directives
+   */
+  protected static final String DIRECTIVE_KEY = "Transform$";
+
+  /**
+   * Transform directives are of the form
+   * <qualifier>:<class> where qualifier is a
+   * string for exact matching or '*' as a wildcard that will match anything;
+   * and class is either the fully qualified class name of a transform
+   * implementation or can be the short name of a transform in the
+   * org.apache.hadoop.hbase.rest.transform package.
+   */
+  protected static final Pattern DIRECTIVE_PATTERN = Pattern
+      .compile("([^\\:]+)\\:([^\\,]+)\\,?");
+  protected static final Transform defaultTransform = new NullTransform();
+
+  /**
+   * Map<table, Map<family, Map<qualifier, Transform>>>
+   */
+  protected static final
+      Map<String, Map<byte[], Map<byte[], Transform>>> transformMap =
+        new ConcurrentHashMap<String, Map<byte[], Map<byte[], Transform>>>();
+  protected static final Map<String, Long> lastCheckedMap =
+      new ConcurrentHashMap<String, Long>();
+
   public ResourceBase() throws IOException {
     servlet = RESTServlet.getInstance();
   }
+
+  public ResourceBase(String table) throws IOException {
+    servlet = RESTServlet.getInstance();
+    this.table = table;
+    // Scanning the table schema is too expensive to do for every operation.
+    // Do it once per minute by default.
+    // Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
+    long now = System.currentTimeMillis();
+    Long lastChecked = lastCheckedMap.get(table);
+    if (lastChecked != null) {
+      long interval = servlet.getConfiguration().getLong(
+        "hbase.rest.transform.check.interval", 60000);
+      if (interval > 0 && (now - lastChecked.longValue()) > interval) {
+        scanTransformAttrs();
+        lastCheckedMap.put(table, now);
+      }
+    } else {
+      scanTransformAttrs();
+      lastCheckedMap.put(table, now);
+    }
+  }
+
+  /** @return the table name */
+  protected String getName() {
+    return table;
+  }
+
+  /**
+   * @param table
+   *          the table
+   * @param family
+   *          the column family
+   * @param qualifier
+   *          the column qualifier, or null
+   * @return the transformation specified for the given family or qualifier, if
+   *         any, otherwise the default
+   */
+  protected static Transform getTransform(String table, byte[] family,
+      byte[] qualifier) {
+    if (qualifier == null) {
+      qualifier = HConstants.EMPTY_BYTE_ARRAY;
+    }
+    Map<byte[], Map<byte[], Transform>> familyMap = transformMap.get(table);
+    if (familyMap != null) {
+      Map<byte[], Transform> columnMap = familyMap.get(family);
+      if (columnMap != null) {
+        Transform t = columnMap.get(qualifier);
+        // check as necessary if there is a wildcard entry
+        if (t == null) {
+          t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
+        }
+        // if we found something, return it, otherwise we will return the
+        // default by falling through
+        if (t != null) {
+          return t;
+        }
+      }
+    }
+    return defaultTransform;
+  }
+
+  protected synchronized static void setTransform(String table, byte[] family,
+      byte[] qualifier, Transform transform) {
+    Map<byte[], Map<byte[], Transform>> familyMap = transformMap.get(table);
+    if (familyMap == null) {
+      familyMap = new ConcurrentSkipListMap<byte[], Map<byte[], Transform>>(
+        Bytes.BYTES_COMPARATOR);
+      transformMap.put(table, familyMap);
+    }
+    Map<byte[], Transform> columnMap = familyMap.get(family);
+    if (columnMap == null) {
+      columnMap = new ConcurrentSkipListMap<byte[], Transform>(
+        Bytes.BYTES_COMPARATOR);
+      familyMap.put(family, columnMap);
+    }
+    // if transform is null, remove any existing entry
+    if (transform != null) {
+      columnMap.put(qualifier, transform);
+    } else {
+      columnMap.remove(qualifier);
+    }
+  }
+
+  /**
+   * Scan the table schema for transform directives. These are column family
+   * attributes containing a comma-separated list of elements of the form
+   * <qualifier>:<transform-class>, where qualifier can be
+   * a string for exact matching or '*' as a wildcard to match anything. The
+   * attribute key must begin with the string "Transform$".
+   */
+  void scanTransformAttrs() throws IOException {
+    HTableDescriptor htd = null;
+    try {
+      HTablePool pool = servlet.getTablePool();
+      HTableInterface t = pool.getTable(table);
+      try {
+        htd = t.getTableDescriptor();
+      } finally {
+        pool.putTable(t);
+      }
+    } catch (Exception e) {
+      // HTablePool#getTable throws RTE, and it doesn't really matter what
+      // exception got us here anyway, htd will be null below
+      LOG.error(e.getMessage(), e);
+    }
+    if (htd == null) {
+      return;
+    }
+    for (HColumnDescriptor hcd : htd.getFamilies()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e : hcd
+          .getValues().entrySet()) {
+        // does the key start with the transform directive tag?
+        String key = Bytes.toString(e.getKey().get());
+        if (!key.startsWith(DIRECTIVE_KEY)) {
+          // no, skip
+          continue;
+        }
+        // match a comma separated list of one or more directives
+        byte[] value = e.getValue().get();
+        Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
+        while (m.find()) {
+          byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
+          String s = m.group(1);
+          if (s.length() > 0 && !s.equals("*")) {
+            qualifier = Bytes.toBytes(s);
+          }
+          String className = m.group(2);
+          try {
+            // if a transform was previously configured for the qualifier,
+            // this will simply replace it
+            setTransform(table, hcd.getName(), qualifier, (Transform) Class
+              .forName(className).newInstance());
+          } catch (ClassNotFoundException ex) {
+            className = "org.apache.hadoop.hbase.rest.transform." + className;
+            try {
+              setTransform(table, hcd.getName(), qualifier, (Transform) Class
+                .forName(className).newInstance());
+            } catch (Exception ex2) {
+              throw new IOException("Cannot instantiate transform", ex2);
+            }
+          } catch (Exception ex) {
+            throw new IOException("Cannot instantiate transform", ex);
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Apply any configured transformations to the value
+   *
+   * @param family
+   * @param qualifier
+   * @param value
+   * @param direction
+   * @return
+   * @throws IOException
+   */
+  protected byte[] transform(byte[] family, byte[] qualifier, byte[] value,
+      Transform.Direction direction) throws IOException {
+    Transform t = getTransform(table, family, qualifier);
+    if (t != null) {
+      return t.transform(value, direction);
+    }
+    return value;
+  }
 }
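With scanTransformAttrs() now in ResourceBase, every resource constructed with a table name picks up transform directives declared on the column family. A sketch of declaring one, assuming the short class name Base64 resolves inside org.apache.hadoop.hbase.rest.transform (table and family names are placeholders):

```java
// Declares a transform directive: an HCD attribute whose key starts with
// "Transform$" and whose value is a comma-separated <qualifier>:<class> list.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class TransformDirectiveExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);
    HTableDescriptor htd = new HTableDescriptor("t1");
    HColumnDescriptor hcd = new HColumnDescriptor("f1");
    // '*' wildcard: every qualifier in f1 goes through the Base64 transform
    hcd.setValue("Transform$1", "*:Base64");
    htd.addFamily(hcd);
    admin.createTable(htd);
  }
}
```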
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java (revision 0)
@@ -0,0 +1,164 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class CheckAndDeleteRowResource extends ResourceBase {
+
+  private static final Log LOG =
+      LogFactory.getLog(CheckAndDeleteRowResource.class);
+
+  CheckAndDeleteTableResource tableResource;
+  RowSpec rowspec;
+
+  /**
+   * Constructor
+   *
+   * @param tableResource
+   * @param rowspec
+   * @param versions
+   * @throws IOException
+   */
+  public CheckAndDeleteRowResource(CheckAndDeleteTableResource tableResource,
+      String rowspec, String versions) throws IOException {
+    super();
+    this.tableResource = tableResource;
+    this.rowspec = new RowSpec(rowspec);
+    if (versions != null) {
+      this.rowspec.setMaxVersions(Integer.valueOf(versions));
+    }
+  }
+
+  /**
+   * Validates the input request parameters, parses columns from the
+   * CellSetModel, and invokes checkAndDelete on HTable.
+   *
+   * @param model instance of CellSetModel
+   * @return Response 200 for OK and 304 for not-modified
+   */
+  Response update(final CellSetModel model) {
+    servlet.getMetrics().incrementRequests(1);
+    if (servlet.isReadOnly()) {
+      throw new WebApplicationException(Response.Status.FORBIDDEN);
+    }
+    HTablePool pool = servlet.getTablePool();
+    HTableInterface table = null;
+    Delete delete = null;
+    try {
+      if (model.getRows().size() != 1) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+      RowModel rowModel = model.getRows().get(0);
+      byte[] key = rowModel.getKey();
+      if (key == null) {
+        key = rowspec.getRow();
+      }
+      if (key == null) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+      if (rowModel.getCells().size() != 1) {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+      delete = new Delete(key);
+
+      CellModel valueToDeleteCell = rowModel.getCells().get(0);
+      byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
+      if (valueToDeleteColumn == null) {
+        try {
+          valueToDeleteColumn = rowspec.getColumns()[0];
+        } catch (final ArrayIndexOutOfBoundsException e) {
+          throw new WebApplicationException(Response.Status.BAD_REQUEST);
+        }
+      }
+      byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
+      if (parts.length == 2 && parts[1].length > 0) {
+        delete.deleteColumns(parts[0], parts[1]);
+      } else {
+        throw new WebApplicationException(Response.Status.BAD_REQUEST);
+      }
+
+      table = pool.getTable(tableResource.getName());
+      boolean retValue = table.checkAndDelete(key, parts[0], parts[1],
+        valueToDeleteCell.getValue(), delete);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+          + retValue);
+      }
+      table.flushCommits();
+      ResponseBuilder response = Response.ok();
+      if (!retValue) {
+        response = Response.status(304);
+      }
+      return response.build();
+    } catch (final IOException e) {
+      throw new WebApplicationException(e, Response.Status.SERVICE_UNAVAILABLE);
+    } finally {
+      if (table != null) {
+        try {
+          pool.putTable(table);
+        } catch (IOException ioe) {
+          throw new WebApplicationException(ioe,
+            Response.Status.SERVICE_UNAVAILABLE);
+        }
+      }
+    }
+  }
+
+  @PUT
+  @Consumes({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF })
+  public Response put(final CellSetModel model, final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("PUT " + uriInfo.getAbsolutePath());
+    }
+    return update(model);
+  }
+
+  @POST
+  @Consumes({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF })
+  public Response post(final CellSetModel model, final @Context UriInfo uriInfo) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("POST " + uriInfo.getAbsolutePath());
+    }
+    return update(model);
+  }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/RootResource.java (revision 1230294)
+++ src/main/java/org/apache/hadoop/hbase/rest/RootResource.java (working copy)
@@ -103,4 +103,16 @@
     final @PathParam("table") String table) throws IOException {
     return new TableResource(table);
   }
+
+  @Path("checkandput/{table}")
+  public CheckAndPutTableResource getCheckAndPutTableResource(
+      final @PathParam("table") String table) throws IOException {
+    return new CheckAndPutTableResource(table);
+  }
+
+  @Path("checkanddelete/{table}")
+  public CheckAndDeleteTableResource getCheckAndDeleteTableResource(
+      final @PathParam("table") String table) throws IOException {
+    return new CheckAndDeleteTableResource(table);
+  }
 }
Index: src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (revision 1230294)
+++ src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (working copy)
@@ -584,12 +584,82 @@
 
   public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
       byte[] value, Put put) throws IOException {
-    throw new IOException("checkAndPut not supported");
+    // add a cell carrying the value to check
+    put.add(new KeyValue(row, family, qualifier, value));
+
+    CellSetModel model = buildModelFromPut(put);
+    StringBuilder sb = new StringBuilder();
+    sb.append('/');
+    if (accessToken != null) {
+      sb.append(accessToken);
+      sb.append('/');
+    }
+    sb.append("checkandput");
+    sb.append('/');
+    sb.append(Bytes.toStringBinary(name));
+    sb.append('/');
+    sb.append(Bytes.toStringBinary(put.getRow()));
+
+    for (int i = 0; i < maxRetries; i++) {
+      Response response = client.put(sb.toString(),
+        Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
+      int code = response.getCode();
+      switch (code) {
+      case 200:
+        return true;
+      case 304: // NOT-MODIFIED
+        return false;
+      case 509:
+        try {
+          Thread.sleep(sleepTime);
+        } catch (final InterruptedException e) {
+        }
+        break;
+      default:
+        throw new IOException("checkAndPut request failed with " + code);
+      }
+    }
+    throw new IOException("checkAndPut request timed out");
   }
 
   public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
       byte[] value, Delete delete) throws IOException {
-    throw new IOException("checkAndDelete not supported");
+    Put put = new Put(row);
+    // add a cell carrying the value to check
+    put.add(new KeyValue(row, family, qualifier, value));
+    CellSetModel model = buildModelFromPut(put);
+    StringBuilder sb = new StringBuilder();
+    sb.append('/');
+    if (accessToken != null) {
+      sb.append(accessToken);
+      sb.append('/');
+    }
+    sb.append("checkanddelete");
+    sb.append('/');
+    sb.append(Bytes.toStringBinary(name));
+    sb.append('/');
+    sb.append(Bytes.toStringBinary(row));
+
+    for (int i = 0; i < maxRetries; i++) {
+      Response response = client.put(sb.toString(),
+        Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
+      int code = response.getCode();
+      switch (code) {
+      case 200:
+        return true;
+      case 304: // NOT-MODIFIED
+        return false;
+      case 509:
+        try {
+          Thread.sleep(sleepTime);
+        } catch (final InterruptedException e) {
+        }
+        break;
+      default:
+        throw new IOException("checkAndDelete request failed with " + code);
+      }
+    }
+    throw new IOException("checkAndDelete request timed out");
   }
 
   public Result increment(Increment increment) throws IOException {
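On the wire, both new RemoteHTable methods map HTTP 200 to true, 304 to false, retry on 509 up to maxRetries, and throw IOException otherwise. That boolean is enough to build optimistic read-modify-write loops on the client side; a hedged sketch, assuming the target counter cell already exists:

```java
// Caller-side sketch of the compare-and-swap loop the new methods enable.
import java.io.IOException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class OptimisticCounter {
  /** Bumps a long counter cell, retrying until our write wins the race. */
  static long increment(RemoteHTable table, byte[] row, byte[] family,
      byte[] qualifier) throws IOException {
    while (true) {
      Result current = table.get(new Get(row));
      byte[] old = current.getValue(family, qualifier);
      long next = Bytes.toLong(old) + 1;
      Put put = new Put(row);
      put.add(family, qualifier, Bytes.toBytes(next));
      // false means HTTP 304: another writer changed the cell between our
      // read and this checkAndPut, so re-read and try again.
      if (table.checkAndPut(row, family, qualifier, old, put)) {
        return next;
      }
    }
  }
}
```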