Index: conf/hbase-default.xml
===================================================================
--- conf/hbase-default.xml (revision 885113)
+++ conf/hbase-default.xml (working copy)
@@ -137,6 +137,17 @@
+ hbase.client.keyvalue.maxsize
+ -1
+ Specifies the combined maximum allowed size of a KeyValue
+ instance. This is to set an upper boundary for a single entry saved in a
+ storage file. Since a single entry cannot be split, this helps avoid a
+ situation where a region cannot be split any further because the data
+ is too large. It seems wise to set this to a fraction of the maximum
+ region size. Setting it to zero
+
+
+
hbase.regionserver.lease.period
60000
HRegion server lease period in milliseconds. Default is
Index: src/java/org/apache/hadoop/hbase/client/HTable.java
===================================================================
--- src/java/org/apache/hadoop/hbase/client/HTable.java (revision 885113)
+++ src/java/org/apache/hadoop/hbase/client/HTable.java (working copy)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
@@ -62,6 +63,7 @@
private boolean autoFlush;
private long currentWriteBufferSize;
protected int scannerCaching;
+ private int maxKeyValueSize;
/**
* Creates an object to access a HBase table
@@ -121,6 +123,7 @@
this.autoFlush = true;
this.currentWriteBufferSize = 0;
this.scannerCaching = conf.getInt("hbase.client.scanner.caching", 1);
+ this.maxKeyValueSize = conf.getInt("hbase.client.keyvalue.maxsize", -1);
}
/**
@@ -602,9 +605,18 @@
* @throws IllegalArgumentException
*/
private void validatePut(final Put put) throws IllegalArgumentException{
- if(put.isEmpty()) {
+ if (put.isEmpty()) {
throw new IllegalArgumentException("No columns to insert");
}
+ if (maxKeyValueSize > 0) {
for (List<KeyValue> list : put.getFamilyMap().values()) {
for (KeyValue kv : list) {
if (kv.getLength() > maxKeyValueSize) {
throw new IllegalArgumentException("KeyValue size too large: " +
kv.getLength() + " > " + maxKeyValueSize);
+ }
+ }
+ }
+ }
}
/**
Index: src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java
===================================================================
--- src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java (revision 885113)
+++ src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java (working copy)
@@ -32,6 +32,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -39,8 +40,6 @@
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.TableExistsException;
-import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
@@ -401,6 +400,26 @@
}
@Test
+ public void testMaxKeyValueSize() throws Exception {
+ byte [] TABLE = Bytes.toBytes("testMaxKeyValueSize");
+ HBaseConfiguration conf = TEST_UTIL.getConfiguration();
+ String oldMaxSize = conf.get("hbase.client.keyvalue.maxsize");
+ HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
+ byte[] value = new byte[4 * 1024 * 1024];
+ Put put = new Put(ROW);
+ put.add(FAMILY, QUALIFIER, value);
+ ht.put(put);
+ // maxKeyValueSize is read once in the HTable constructor, so the
+ // lowered limit only takes effect for a freshly created HTable.
+ conf.setInt("hbase.client.keyvalue.maxsize", 2 * 1024 * 1024);
+ ht = new HTable(conf, TABLE);
+ try {
+ put = new Put(ROW);
+ // reuse the 4MB value so it actually exceeds the 2MB limit
+ put.add(FAMILY, QUALIFIER, value);
+ ht.put(put);
+ throw new IOException("Inserting a too large KeyValue worked, should throw exception");
+ } catch(IllegalArgumentException e) {
+ // expected: oversized KeyValue rejected client-side by validatePut
+ } finally {
+ conf.set("hbase.client.keyvalue.maxsize", oldMaxSize);
+ }
+ }
+
+ @Test
public void testFilters() throws Exception {
byte [] TABLE = Bytes.toBytes("testFilters");
HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);