Index: src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (revision 1524855) +++ src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java (working copy) @@ -34,6 +34,8 @@ import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.util.StringUtils; @@ -690,6 +692,11 @@ throw new IOException("checkAndPut request timed out"); } + public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Put put) throws IOException { + throw new IOException("checkAndPut with comparator not supported"); + } + public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException { Put put = new Put(row); @@ -725,6 +732,12 @@ throw new IOException("checkAndDelete request timed out"); } + @Override + public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Delete delete) throws IOException { + throw new IOException("checkAndDelete with comparator not supported"); + } + public Result increment(Increment increment) throws IOException { throw new IOException("Increment not supported"); } Index: src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java (revision 1524855) +++ src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java (working copy) @@ -28,6 +28,8 @@ import 
org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; /** @@ -226,6 +228,23 @@ byte[] value, Put put) throws IOException; /** + * Atomically checks if a row/family/qualifier value matches the expected + * value. If it does, it adds the put. The check is performed using the + * given compare operation and comparator + * + * @param row to check + * @param family column family to check + * @param qualifier column qualifier to check + * @param compareOp the comparison operator + * @param comparator the comparator to use for the check + * @param put data to put if check succeeds + * @throws IOException e + * @return true if the new put was executed, false otherwise + */ + boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Put put) throws IOException; + + /** * Deletes the specified cells/row. * * @param delete The object that specifies what to delete. @@ -264,6 +283,23 @@ byte[] value, Delete delete) throws IOException; /** + * Atomically checks if a row/family/qualifier value matches the expected + * value. If it does, it adds the delete.
The check is performed using the + * given compare operation and comparator + * + * @param row to check + * @param family column family to check + * @param qualifier column qualifier to check + * @param compareOp the comparison operator + * @param comparator the comparator to use for the check + * @param delete data to delete if check succeeds + * @throws IOException e + * @return true if the new delete was executed, false otherwise + */ + boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Delete delete) throws IOException; + + /** * Performs multiple mutations atomically on a single row. Currently * {@link Put} and {@link Delete} are supported. * Index: src/main/java/org/apache/hadoop/hbase/client/HTablePool.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/client/HTablePool.java (revision 1524855) +++ src/main/java/org/apache/hadoop/hbase/client/HTablePool.java (working copy) @@ -30,6 +30,8 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.PoolMap; @@ -407,6 +409,12 @@ } @Override + public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Put put) throws IOException { + return table.checkAndPut(row, family, qualifier, compareOp, comparator, put); + } + + @Override public void delete(Delete delete) throws IOException { table.delete(delete); } @@ -423,6 +431,12 @@ } @Override + public boolean checkAndDelete(byte[] row, byte[] family, 
byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Delete delete) throws IOException { + return table.checkAndDelete(row, family, qualifier, compareOp, comparator, delete); + } + + @Override public Result increment(Increment increment) throws IOException { return table.increment(increment); } Index: src/main/java/org/apache/hadoop/hbase/client/HTable.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/client/HTable.java (revision 1524855) +++ src/main/java/org/apache/hadoop/hbase/client/HTable.java (working copy) @@ -48,6 +48,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.ExecRPCInvoker; import org.apache.hadoop.hbase.util.Addressing; @@ -947,6 +949,21 @@ }.withRetries(); } + /** + * {@inheritDoc} + */ + @Override + public boolean checkAndPut(final byte[] row, + final byte[] family, final byte[] qualifier, final CompareFilter.CompareOp compareOp, + final WritableByteArrayComparable comparator, final Put put) + throws IOException { + return new ServerCallable(connection, tableName, row, operationTimeout) { + public Boolean call() throws IOException { + return server.checkAndPut(location.getRegionInfo().getRegionName(), + row, family, qualifier, compareOp, comparator, put) ? 
Boolean.TRUE : Boolean.FALSE; + } + }.withRetries(); + } /** * {@inheritDoc} @@ -970,6 +987,24 @@ * {@inheritDoc} */ @Override + public boolean checkAndDelete(final byte[] row, + final byte[] family, final byte[] qualifier, final CompareFilter.CompareOp compareOp, + final WritableByteArrayComparable comparator, final Delete delete) + throws IOException { + return new ServerCallable(connection, tableName, row, operationTimeout) { + public Boolean call() throws IOException { + return server.checkAndDelete( + location.getRegionInfo().getRegionName(), + row, family, qualifier, compareOp, comparator, delete) + ? Boolean.TRUE : Boolean.FALSE; + } + }.withRetries(); + } + + /** + * {@inheritDoc} + */ + @Override public boolean exists(final Get get) throws IOException { return new ServerCallable(connection, tableName, get.getRow(), operationTimeout) { public Boolean call() throws IOException { Index: src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java (revision 1524855) +++ src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java (working copy) @@ -57,6 +57,8 @@ import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CoprocessorClassLoader; @@ -436,11 +438,21 @@ return table.checkAndPut(row, family, qualifier, value, put); } + public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Put put) throws IOException { + return table.checkAndPut(row, family, qualifier, 
compareOp, comparator, put); + } + public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException { return table.checkAndDelete(row, family, qualifier, value, delete); } + public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, + CompareFilter.CompareOp compareOp, WritableByteArrayComparable comparator, Delete delete) throws IOException { + return table.checkAndDelete(row, family, qualifier, compareOp, comparator, delete); + } + public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException { return table.incrementColumnValue(row, family, qualifier, amount);