Index: src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java (revision 1500035) +++ src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java (working copy) @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -34,6 +35,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.thrift.ThriftMetrics; import org.apache.hadoop.hbase.thrift2.generated.TColumn; import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement; @@ -626,7 +628,6 @@ int scanId = handler.openScanner(table, scan); List results = null; for (int i = 0; i < 10; i++) { - System.out.println("batch: " + i); // get batch for single row (10x10 is what we expect) results = handler.getScannerRows(scanId, 1); assertEquals(1, results.size()); @@ -637,7 +638,6 @@ for (int y = 0; y < 10; y++) { int colNum = y + (10 * i); String colNumPad = pad(colNum, (byte) 3); - System.out.println("col" + colNumPad + ": " + new String(cols.get(y).getQualifier())); assertArrayEquals(("col" + colNumPad).getBytes(), cols.get(y).getQualifier()); } } @@ -656,6 +656,15 @@ } @Test + public void testFilterRegistration() throws Exception { + Configuration conf = UTIL.getConfiguration(); + conf.set("hbase.thrift.filters", "MyFilter:filterclass"); + ThriftServer.registerFilters(conf); + Map<String, String> registeredFilters = ParseFilter.getAllFilters(); + assertEquals("filterclass", registeredFilters.get("MyFilter")); + } + + @Test public void testMetrics() throws Exception { Configuration conf = UTIL.getConfiguration(); 
ThriftMetrics metrics = getMetrics(conf); Index: src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java (revision 1500031) +++ src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java (working copy) @@ -40,6 +40,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.thrift.CallQueue; import org.apache.hadoop.hbase.thrift.CallQueue.Call; import org.apache.hadoop.hbase.thrift.ThriftMetrics; @@ -193,6 +194,25 @@ } /** + * Adds the option to pre-load filters at startup. + * + * @param conf The current configuration instance. + */ + protected static void registerFilters(Configuration conf) { + String[] filters = conf.getStrings("hbase.thrift.filters"); + if (filters != null) { + for (String filterClass : filters) { + String[] filterPart = filterClass.split(":"); + if (filterPart.length != 2) { + log.warn("Invalid filter specification " + filterClass + " - skipping"); + } else { + ParseFilter.registerFilter(filterPart[0], filterPart[1]); + } + } + } + } + + /** * Start up the Thrift2 server. * * @param args @@ -237,6 +257,7 @@ conf.set("hbase.regionserver.thrift.server.type", implType); conf.setInt("hbase.regionserver.thrift.port", listenPort); + registerFilters(conf); // Construct correct ProtocolFactory boolean compact = cmd.hasOption("compact");