Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutTableResource.java (revision 0)
@@ -0,0 +1,245 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.Encoded;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.rest.transform.NullTransform;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class CheckAndPutTableResource extends ResourceBase {
+
+ /**
+ * HCD attributes starting with this string are considered transform
+ * directives
+ */
+ private static final String DIRECTIVE_KEY = "Transform$";
+
+ /**
+ * Transform directives are of the form <qualifier>:<class>
+ * where qualifier is a string for exact matching or '*' as a wildcard
+ * that will match anything; and class is either the fully qualified
+ * class name of a transform implementation or can be the short name of a
+ * transform in the org.apache.hadoop.hbase.rest.transform package.
+ */
+ private static final Pattern DIRECTIVE_PATTERN =
+ Pattern.compile("([^\\:]+)\\:([^\\,]+)\\,?");
+ private static final Transform defaultTransform = new NullTransform();
+ private static final
+ Map<String,Map<byte[],Map<byte[],Transform>>> transformMap =
+ new ConcurrentHashMap<String,Map<byte[],Map<byte[],Transform>>>();
+ private static final Map<String,Long> lastCheckedMap =
+ new ConcurrentHashMap<String,Long>();
+
+ /**
+ * @param table the table
+ * @param family the column family
+ * @param qualifier the column qualifier, or null
+ * @return the transformation specified for the given family or qualifier, if
+ * any, otherwise the default
+ */
+ static Transform getTransform(String table, byte[] family, byte[] qualifier) {
+ if (qualifier == null) {
+ qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ }
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap != null) {
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap != null) {
+ Transform t = columnMap.get(qualifier);
+ // check as necessary if there is a wildcard entry
+ if (t == null) {
+ t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
+ }
+ // if we found something, return it, otherwise we will return the
+ // default by falling through
+ if (t != null) {
+ return t;
+ }
+ }
+ }
+ return defaultTransform;
+ }
+
+ synchronized static void setTransform(String table, byte[] family,
+ byte[] qualifier, Transform transform) {
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap == null) {
+ familyMap = new ConcurrentSkipListMap<byte[],Map<byte[],Transform>>(
+ Bytes.BYTES_COMPARATOR);
+ transformMap.put(table, familyMap);
+ }
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap == null) {
+ columnMap = new ConcurrentSkipListMap<byte[],Transform>(
+ Bytes.BYTES_COMPARATOR);
+ familyMap.put(family, columnMap);
+ }
+ // if transform is null, remove any existing entry
+ if (transform != null) {
+ columnMap.put(qualifier, transform);
+ } else {
+ columnMap.remove(qualifier);
+ }
+ }
+
+ String table;
+
+ /**
+ * Scan the table schema for transform directives. These are column family
+ * attributes containing a comma-separated list of elements of the form
+ * <qualifier>:<transform-class>, where qualifier
+ * can be a string for exact matching or '*' as a wildcard to match anything.
+ * The attribute key must begin with the string "Transform$".
+ */
+ void scanTransformAttrs() throws IOException {
+ HTableDescriptor htd = null;
+ try {
+ HTablePool pool = servlet.getTablePool();
+ HTableInterface t = pool.getTable(table);
+ try {
+ htd = t.getTableDescriptor();
+ } finally {
+ pool.putTable(t);
+ }
+ } catch (Exception e) {
+ // HTablePool#getTable throws RTE, and it doesn't really matter what
+ // exception got us here anyway, htd will be null below
+ }
+ if (htd == null) {
+ return;
+ }
+ for (HColumnDescriptor hcd: htd.getFamilies()) {
+ for (Map.Entry<ImmutableBytesWritable,ImmutableBytesWritable> e:
+ hcd.getValues().entrySet()) {
+ // does the key start with the transform directive tag?
+ String key = Bytes.toString(e.getKey().get());
+ if (!key.startsWith(DIRECTIVE_KEY)) {
+ // no, skip
+ continue;
+ }
+ // match a comma separated list of one or more directives
+ byte[] value = e.getValue().get();
+ Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
+ while (m.find()) {
+ byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ String s = m.group(1);
+ if (s.length() > 0 && !s.equals("*")) {
+ qualifier = Bytes.toBytes(s);
+ }
+ String className = m.group(2);
+ try {
+ // if a transform was previously configured for the qualifier,
+ // this will simply replace it
+ setTransform(table, hcd.getName(), qualifier,
+ (Transform)Class.forName(className).newInstance());
+ } catch (ClassNotFoundException ex) {
+ className = "org.apache.hadoop.hbase.rest.transform." + className;
+ try {
+ setTransform(table, hcd.getName(), qualifier,
+ (Transform)Class.forName(className).newInstance());
+ } catch (Exception ex2) {
+ throw new IOException("Cannot instantiate transform", ex2);
+ }
+ } catch (Exception ex) {
+ throw new IOException("Cannot instantiate transform", ex);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Constructor
+ * @param table
+ * @throws IOException
+ */
+ public CheckAndPutTableResource(String table) throws IOException {
+ super();
+ this.table = table;
+ // Scanning the table schema is too expensive to do for every operation.
+ // Do it once per minute by default.
+ // Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
+ long now = System.currentTimeMillis();
+ Long lastChecked = lastCheckedMap.get(table);
+ if (lastChecked != null) {
+ long interval = servlet.getConfiguration()
+ .getLong("hbase.rest.transform.check.interval", 60000);
+ if (interval > 0 && (now - lastChecked.longValue()) > interval) {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ } else {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ }
+
+ /** @return the table name */
+ String getName() {
+ return table;
+ }
+
+ /**
+ * Apply any configured transformations to the value
+ * @param family
+ * @param qualifier
+ * @param value
+ * @param direction
+ * @return
+ * @throws IOException
+ */
+ byte[] transform(byte[] family, byte[] qualifier, byte[] value,
+ Transform.Direction direction) throws IOException {
+ Transform t = getTransform(table, family, qualifier);
+ if (t != null) {
+ return t.transform(value, direction);
+ }
+ return value;
+ }
+
+
+ @Path("{rowspec: .+}")
+ public CheckAndPutRowResource getRowResource(
+ // We need the @Encoded decorator so Jersey won't urldecode before
+ // the RowSpec constructor has a chance to parse
+ final @PathParam("rowspec") @Encoded String rowspec,
+ final @QueryParam("v") String versions) throws IOException {
+ return new CheckAndPutRowResource(this, rowspec, versions);
+ }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndPutRowResource.java (revision 0)
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class CheckAndPutRowResource extends ResourceBase {
+ private static final Log LOG = LogFactory.getLog(CheckAndPutRowResource.class);
+
+ CheckAndPutTableResource tableResource;
+ RowSpec rowspec;
+
+ /**
+ * Constructor
+ * @param tableResource
+ * @param rowspec
+ * @param versions
+ * @throws IOException
+ */
+ public CheckAndPutRowResource(CheckAndPutTableResource tableResource, String rowspec,
+ String versions) throws IOException {
+ super();
+ this.tableResource = tableResource;
+ this.rowspec = new RowSpec(rowspec);
+ if (versions != null) {
+ this.rowspec.setMaxVersions(Integer.valueOf(versions));
+ }
+ }
+
+ Response update(final CellSetModel model, final boolean replace) {
+ servlet.getMetrics().incrementRequests(1);
+ if (servlet.isReadOnly()) {
+ throw new WebApplicationException(Response.Status.FORBIDDEN);
+ }
+ HTablePool pool = servlet.getTablePool();
+ HTableInterface table = null;
+ try {
+ RowModel rowModel = model.getRows().get(0);
+ byte[] key = rowModel.getKey();
+ if (key == null) {
+ key = rowspec.getRow();
+ }
+ if (key == null) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ if (rowModel.getCells().size() < 2) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ Put put = new Put(key);
+
+ CellModel valueToPutCell = rowModel.getCells().get(0);
+ byte[] valueToPutColumn = valueToPutCell.getColumn();
+ if (valueToPutColumn == null) {
+ try {
+ valueToPutColumn = rowspec.getColumns()[0];
+ } catch (final ArrayIndexOutOfBoundsException e) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+ }
+
+ byte[][] valueToPutParts = KeyValue.parseColumn(valueToPutColumn);
+ if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
+ put.add(valueToPutParts[0],
+ valueToPutParts[1],
+ valueToPutCell.getTimestamp(),
+ tableResource.transform(valueToPutParts[0], valueToPutParts[1],
+ valueToPutCell.getValue(), Transform.Direction.IN));
+ } else {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ CellModel valueToCheckCell = rowModel.getCells().get(1);
+ byte[] valueToCheckColumn = valueToCheckCell.getColumn();
+ if (valueToCheckColumn == null) {
+ try {
+ valueToCheckColumn = rowspec.getColumns()[1];
+ } catch (final ArrayIndexOutOfBoundsException e) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+ }
+ // the check is made against the column named by the second cell, not
+ // the column being put
+ byte[][] valueToCheckParts = KeyValue.parseColumn(valueToCheckColumn);
+ if (valueToCheckParts.length != 2 || valueToCheckParts[1].length == 0) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ table = pool.getTable(tableResource.getName());
+ ((HTable) table).setAutoFlush(false);
+ boolean retValue = table.checkAndPut(key, valueToCheckParts[0],
+ valueToCheckParts[1], valueToCheckCell.getValue(), put);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ }
+ ((HTable) table).setAutoFlush(true);
+ table.flushCommits();
+ ResponseBuilder response = Response.ok();
+ if (!retValue) {
+ // the check failed; nothing was written
+ response = Response.status(304);
+ }
+ return response.build();
+ } catch (final IOException e) {
+ throw new WebApplicationException(e,
+ Response.Status.SERVICE_UNAVAILABLE);
+ } finally {
+ if (table != null) {
+ pool.putTable(table);
+ }
+ }
+ }
+
+ @PUT
+ @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+ public Response put(final CellSetModel model,
+ final @Context UriInfo uriInfo) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ }
+ return update(model, true);
+ }
+
+ @POST
+ @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+ public Response post(final CellSetModel model,
+ final @Context UriInfo uriInfo) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("POST " + uriInfo.getAbsolutePath());
+ }
+ return update(model, false);
+ }
+
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteTableResource.java (revision 0)
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.Encoded;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.rest.transform.NullTransform;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class CheckAndDeleteTableResource extends ResourceBase {
+
+ /**
+ * HCD attributes starting with this string are considered transform
+ * directives
+ */
+ private static final String DIRECTIVE_KEY = "Transform$";
+
+ /**
+ * Transform directives are of the form <qualifier>:<class>
+ * where qualifier is a string for exact matching or '*' as a wildcard
+ * that will match anything; and class is either the fully qualified
+ * class name of a transform implementation or can be the short name of a
+ * transform in the org.apache.hadoop.hbase.rest.transform package.
+ */
+ private static final Pattern DIRECTIVE_PATTERN =
+ Pattern.compile("([^\\:]+)\\:([^\\,]+)\\,?");
+ private static final Transform defaultTransform = new NullTransform();
+ private static final
+ Map<String,Map<byte[],Map<byte[],Transform>>> transformMap =
+ new ConcurrentHashMap<String,Map<byte[],Map<byte[],Transform>>>();
+ private static final Map<String,Long> lastCheckedMap =
+ new ConcurrentHashMap<String,Long>();
+
+ /**
+ * @param table the table
+ * @param family the column family
+ * @param qualifier the column qualifier, or null
+ * @return the transformation specified for the given family or qualifier, if
+ * any, otherwise the default
+ */
+ static Transform getTransform(String table, byte[] family, byte[] qualifier) {
+ if (qualifier == null) {
+ qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ }
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap != null) {
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap != null) {
+ Transform t = columnMap.get(qualifier);
+ // check as necessary if there is a wildcard entry
+ if (t == null) {
+ t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
+ }
+ // if we found something, return it, otherwise we will return the
+ // default by falling through
+ if (t != null) {
+ return t;
+ }
+ }
+ }
+ return defaultTransform;
+ }
+
+ synchronized static void setTransform(String table, byte[] family,
+ byte[] qualifier, Transform transform) {
+ Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
+ if (familyMap == null) {
+ familyMap = new ConcurrentSkipListMap<byte[],Map<byte[],Transform>>(
+ Bytes.BYTES_COMPARATOR);
+ transformMap.put(table, familyMap);
+ }
+ Map<byte[],Transform> columnMap = familyMap.get(family);
+ if (columnMap == null) {
+ columnMap = new ConcurrentSkipListMap<byte[],Transform>(
+ Bytes.BYTES_COMPARATOR);
+ familyMap.put(family, columnMap);
+ }
+ // if transform is null, remove any existing entry
+ if (transform != null) {
+ columnMap.put(qualifier, transform);
+ } else {
+ columnMap.remove(qualifier);
+ }
+ }
+
+ String table;
+
+ /**
+ * Scan the table schema for transform directives. These are column family
+ * attributes containing a comma-separated list of elements of the form
+ * <qualifier>:<transform-class>, where qualifier
+ * can be a string for exact matching or '*' as a wildcard to match anything.
+ * The attribute key must begin with the string "Transform$".
+ */
+ void scanTransformAttrs() throws IOException {
+ HTableDescriptor htd = null;
+ try {
+ HTablePool pool = servlet.getTablePool();
+ HTableInterface t = pool.getTable(table);
+ try {
+ htd = t.getTableDescriptor();
+ } finally {
+ pool.putTable(t);
+ }
+ } catch (Exception e) {
+ // HTablePool#getTable throws RTE, and it doesn't really matter what
+ // exception got us here anyway, htd will be null below
+ }
+ if (htd == null) {
+ return;
+ }
+ for (HColumnDescriptor hcd: htd.getFamilies()) {
+ for (Map.Entry<ImmutableBytesWritable,ImmutableBytesWritable> e:
+ hcd.getValues().entrySet()) {
+ // does the key start with the transform directive tag?
+ String key = Bytes.toString(e.getKey().get());
+ if (!key.startsWith(DIRECTIVE_KEY)) {
+ // no, skip
+ continue;
+ }
+ // match a comma separated list of one or more directives
+ byte[] value = e.getValue().get();
+ Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
+ while (m.find()) {
+ byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ String s = m.group(1);
+ if (s.length() > 0 && !s.equals("*")) {
+ qualifier = Bytes.toBytes(s);
+ }
+ String className = m.group(2);
+ try {
+ // if a transform was previously configured for the qualifier,
+ // this will simply replace it
+ setTransform(table, hcd.getName(), qualifier,
+ (Transform)Class.forName(className).newInstance());
+ } catch (ClassNotFoundException ex) {
+ className = "org.apache.hadoop.hbase.rest.transform." + className;
+ try {
+ setTransform(table, hcd.getName(), qualifier,
+ (Transform)Class.forName(className).newInstance());
+ } catch (Exception ex2) {
+ throw new IOException("Cannot instantiate transform", ex2);
+ }
+ } catch (Exception ex) {
+ throw new IOException("Cannot instantiate transform", ex);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Constructor
+ * @param table
+ * @throws IOException
+ */
+ public CheckAndDeleteTableResource(String table) throws IOException {
+ super();
+ this.table = table;
+ // Scanning the table schema is too expensive to do for every operation.
+ // Do it once per minute by default.
+ // Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
+ long now = System.currentTimeMillis();
+ Long lastChecked = lastCheckedMap.get(table);
+ if (lastChecked != null) {
+ long interval = servlet.getConfiguration()
+ .getLong("hbase.rest.transform.check.interval", 60000);
+ if (interval > 0 && (now - lastChecked.longValue()) > interval) {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ } else {
+ scanTransformAttrs();
+ lastCheckedMap.put(table, now);
+ }
+ }
+
+ /** @return the table name */
+ String getName() {
+ return table;
+ }
+
+ /**
+ * Apply any configured transformations to the value
+ * @param family
+ * @param qualifier
+ * @param value
+ * @param direction
+ * @return
+ * @throws IOException
+ */
+ byte[] transform(byte[] family, byte[] qualifier, byte[] value,
+ Transform.Direction direction) throws IOException {
+ Transform t = getTransform(table, family, qualifier);
+ if (t != null) {
+ return t.transform(value, direction);
+ }
+ return value;
+ }
+
+ @Path("{rowspec: .+}")
+ public CheckAndDeleteRowResource getRowResource(
+ // We need the @Encoded decorator so Jersey won't urldecode before
+ // the RowSpec constructor has a chance to parse
+ final @PathParam("rowspec") @Encoded String rowspec,
+ final @QueryParam("v") String versions) throws IOException {
+ return new CheckAndDeleteRowResource(this, rowspec, versions);
+ }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/rest/CheckAndDeleteRowResource.java (revision 0)
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.rest.transform.Transform;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class CheckAndDeleteRowResource extends ResourceBase {
+ private static final Log LOG = LogFactory.getLog(CheckAndDeleteRowResource.class);
+
+ CheckAndDeleteTableResource tableResource;
+ RowSpec rowspec;
+
+ /**
+ * Constructor
+ * @param tableResource
+ * @param rowspec
+ * @param versions
+ * @throws IOException
+ */
+ public CheckAndDeleteRowResource(CheckAndDeleteTableResource tableResource, String rowspec,
+ String versions) throws IOException {
+ super();
+ this.tableResource = tableResource;
+ this.rowspec = new RowSpec(rowspec);
+ if (versions != null) {
+ this.rowspec.setMaxVersions(Integer.valueOf(versions));
+ }
+ }
+
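+ // The request body is expected to carry a single row with at least two
+ // cells: the first names the column to delete, the second names the
+ // column to check and the value it must currently hold.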
+ Response update(final CellSetModel model, final boolean replace) {
+ servlet.getMetrics().incrementRequests(1);
+ if (servlet.isReadOnly()) {
+ throw new WebApplicationException(Response.Status.FORBIDDEN);
+ }
+ HTablePool pool = servlet.getTablePool();
+ HTableInterface table = null;
+ Delete delete = null;
+ try {
+ RowModel rowModel = model.getRows().get(0);
+ byte[] key = rowModel.getKey();
+ if (key == null) {
+ key = rowspec.getRow();
+ }
+ if (key == null) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+ if (rowModel.getCells().size() < 2) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+ delete = new Delete(key);
+
+ CellModel valueToDeleteCell = rowModel.getCells().get(0);
+ byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
+ if (valueToDeleteColumn == null) {
+ try {
+ valueToDeleteColumn = rowspec.getColumns()[0];
+ } catch (final ArrayIndexOutOfBoundsException e) {
+ throw new WebApplicationException(
+ Response.Status.BAD_REQUEST);
+ }
+ }
+ byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
+ if (parts.length == 2 && parts[1].length > 0) {
+ delete.deleteColumns(parts[0], parts[1]);
+ } else {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ CellModel valueToCheckCell = rowModel.getCells().get(1);
+ byte[] valueToCheckColumn = valueToCheckCell.getColumn();
+ if (valueToCheckColumn == null) {
+ try {
+ valueToCheckColumn = rowspec.getColumns()[1];
+ } catch (final ArrayIndexOutOfBoundsException e) {
+ throw new WebApplicationException(
+ Response.Status.BAD_REQUEST);
+ }
+ }
+
+ // the check is made against the column named by the second cell, not
+ // the column being deleted
+ byte[][] valueToCheckParts = KeyValue.parseColumn(valueToCheckColumn);
+ if (valueToCheckParts.length != 2 || valueToCheckParts[1].length == 0) {
+ throw new WebApplicationException(Response.Status.BAD_REQUEST);
+ }
+
+ table = pool.getTable(tableResource.getName());
+ ((HTable) table).setAutoFlush(false);
+ boolean retValue = table.checkAndDelete(key, valueToCheckParts[0],
+ valueToCheckParts[1], valueToCheckCell.getValue(), delete);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns " + retValue);
+ }
+ ((HTable) table).setAutoFlush(true);
+ table.flushCommits();
+ ResponseBuilder response = Response.ok();
+ if (!retValue) {
+ // the check failed; nothing was deleted
+ response = Response.status(304);
+ }
+ return response.build();
+ } catch (final IOException e) {
+ throw new WebApplicationException(e,
+ Response.Status.SERVICE_UNAVAILABLE);
+ } finally {
+ if (table != null) {
+ pool.putTable(table);
+ }
+ }
+ }
+
+ @PUT
+ @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+ public Response put(final CellSetModel model,
+ final @Context UriInfo uriInfo) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ }
+ return update(model, true);
+ }
+
+ @POST
+ @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
+ public Response post(final CellSetModel model,
+ final @Context UriInfo uriInfo) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("POST " + uriInfo.getAbsolutePath());
+ }
+ return update(model, false);
+ }
+}
Index: src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/RootResource.java (revision 1215035)
+++ src/main/java/org/apache/hadoop/hbase/rest/RootResource.java (working copy)
@@ -101,4 +101,16 @@
final @PathParam("table") String table) throws IOException {
return new TableResource(table);
}
+
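+ // atomic check-and-put, rooted at /checkandput/{table}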
+ @Path("checkandput/{table}")
+ public CheckAndPutTableResource getCheckAndPutTableResource(
+ final @PathParam("table") String table) throws IOException {
+ return new CheckAndPutTableResource(table);
+ }
+
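+ // atomic check-and-delete, rooted at /checkanddelete/{table}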
+ @Path("checkanddelete/{table}")
+ public CheckAndDeleteTableResource getCheckAndDeleteTableResource(
+ final @PathParam("table") String table) throws IOException {
+ return new CheckAndDeleteTableResource(table);
+ }
}
Index: src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (revision 1215035)
+++ src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (working copy)
@@ -581,12 +581,82 @@
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put) throws IOException {
- throw new IOException("checkAndPut not supported");
+ // carry the column to check and its expected value as an extra cell
+ // appended to the cells being put
+ put.add(new KeyValue(row, family, qualifier, value));
+
+ CellSetModel model = buildModelFromPut(put);
+ StringBuilder sb = new StringBuilder();
+ sb.append('/');
+ if (accessToken != null) {
+ sb.append(accessToken);
+ sb.append('/');
+ }
+ sb.append("checkandput");
+ sb.append('/');
+ sb.append(Bytes.toStringBinary(name));
+ sb.append('/');
+ sb.append(Bytes.toStringBinary(put.getRow()));
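+ // resulting path: [/accessToken]/checkandput/<table>/<row>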
+
+ for (int i = 0; i < maxRetries; i++) {
+ Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
+ model.createProtobufOutput());
+ int code = response.getCode();
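+ // 200: the check passed and the put was applied; 304: the check failed;
+ // 509: retry after sleepTime, up to maxRetries attempts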
+ switch (code) {
+ case 200:
+ return true;
+ case 304: // Not Modified: the check failed and nothing was written
+ return false;
+ case 509:
+ try {
+ Thread.sleep(sleepTime);
+ } catch (final InterruptedException e) { }
+ break;
+ default:
+ throw new IOException("checkAndPut request failed with " + code);
+ }
+ }
+ throw new IOException("checkAndPut request timed out");
}
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Delete delete) throws IOException {
- throw new IOException("checkAndDelete not supported");
+ // a Put is used only as a carrier for the two cells sent to the
+ // checkanddelete resource: first the column to delete, then the column
+ // to check together with its expected value
+ Put put = new Put(row);
+ put.add(new KeyValue(row, family, qualifier));
+ put.add(new KeyValue(row, family, qualifier, value));
+ CellSetModel model = buildModelFromPut(put);
+ StringBuilder sb = new StringBuilder();
+ sb.append('/');
+ if (accessToken != null) {
+ sb.append(accessToken);
+ sb.append('/');
+ }
+ sb.append("checkanddelete");
+ sb.append('/');
+ sb.append(Bytes.toStringBinary(name));
+ sb.append('/');
+ sb.append(Bytes.toStringBinary(row));
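+ // resulting path: [/accessToken]/checkanddelete/<table>/<row>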
+
+ for (int i = 0; i < maxRetries; i++) {
+ Response response = client.put(sb.toString(), Constants.MIMETYPE_PROTOBUF,
+ model.createProtobufOutput());
+ int code = response.getCode();
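+ // 200: the check passed and the delete was applied; 304: the check failed;
+ // 509: retry after sleepTime, up to maxRetries attempts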
+ switch (code) {
+ case 200:
+ return true;
+ case 304: // Not Modified: the check failed and nothing was deleted
+ return false;
+ case 509:
+ try {
+ Thread.sleep(sleepTime);
+ } catch (final InterruptedException e) { }
+ break;
+ default:
+ throw new IOException("checkAndDelete request failed with " + code);
+ }
+ }
+ throw new IOException("checkAndDelete request timed out");
}