Index: src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java	(working copy)
@@ -16,15 +16,17 @@
  */
 package org.apache.hadoop.hbase.filter;

-import junit.framework.TestCase;
 import org.apache.hadoop.hbase.SmallTests;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.assertEquals;
+
 /**
  * Tests for the bit comparator
  */
 @Category(SmallTests.class)
-public class TestBitComparator extends TestCase {
+public class TestBitComparator {

   private static byte[] zeros = new byte[]{0, 0, 0, 0, 0, 0};
   private static byte[] ones = new byte[]{1, 1, 1, 1, 1, 1};
@@ -35,6 +37,7 @@
   private final int Equal = 0;
   private final int NotEqual = 1;

+  @Test
   public void testANDOperation() {
     testOperation(zeros, ones, BitComparator.BitwiseOp.AND, NotEqual);
     testOperation(data1, ones, BitComparator.BitwiseOp.AND, Equal);
@@ -44,6 +47,7 @@
     testOperation(ones, data3, BitComparator.BitwiseOp.AND, NotEqual);
   }

+  @Test
   public void testOROperation() {
     testOperation(ones, zeros, BitComparator.BitwiseOp.OR, Equal);
     testOperation(zeros, zeros, BitComparator.BitwiseOp.OR, NotEqual);
@@ -52,6 +56,7 @@
     testOperation(ones, data3, BitComparator.BitwiseOp.OR, NotEqual);
   }

+  @Test
   public void testXOROperation() {
     testOperation(ones, zeros, BitComparator.BitwiseOp.XOR, Equal);
     testOperation(zeros, zeros, BitComparator.BitwiseOp.XOR, NotEqual);
Index: src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java	(working copy)
@@ -28,15 +28,18 @@
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;

-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.assertTrue;
+
 /**
  * Test for the ColumnPaginationFilter, used mainly to test the successful serialization of the filter.
  * More test functionality can be found within {@link org.apache.hadoop.hbase.filter.TestFilter#testColumnPaginationFilter()}
  */
 @Category(SmallTests.class)
-public class TestColumnPaginationFilter extends TestCase
+public class TestColumnPaginationFilter
 {
   private static final byte[] ROW = Bytes.toBytes("row_1_test");
   private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
@@ -45,9 +48,8 @@

   private Filter columnPaginationFilter;

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     columnPaginationFilter = getColumnPaginationFilter();
   }

@@ -88,6 +90,7 @@
    * Tests serialization
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     Filter newFilter = serializationTest(columnPaginationFilter);
     basicFilterTests((ColumnPaginationFilter)newFilter);
Index: src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java	(working copy)
@@ -36,11 +36,16 @@
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;

-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import org.junit.experimental.categories.Category;

 @Category(SmallTests.class)
-public class TestDependentColumnFilter extends TestCase {
+public class TestDependentColumnFilter {
   private final Log LOG = LogFactory.getLog(this.getClass());
   private static final byte[][] ROWS = {
     Bytes.toBytes("test1"),Bytes.toBytes("test2")
@@ -57,31 +62,26 @@
     Bytes.toBytes("bad1"), Bytes.toBytes("bad2"), Bytes.toBytes("bad3")
   };
   private static final byte[] MATCH_VAL = Bytes.toBytes("match");
-  private HBaseTestingUtility testUtil;
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

   List<KeyValue> testVals;
   private HRegion region;

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
-
-    testUtil = new HBaseTestingUtility();
-
+  @Before
+  public void setUp() throws Exception {
     testVals = makeTestVals();

-    HTableDescriptor htd = new HTableDescriptor(getName());
+    HTableDescriptor htd = new HTableDescriptor(this.getClass().getName());
     htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
     htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
-    this.region = HRegion.createHRegion(info, testUtil.getDataTestDir(),
-        testUtil.getConfiguration(), htd);
+    this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
+        TEST_UTIL.getConfiguration(), htd);
     addData();
   }

-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
+  @After
+  public void tearDown() throws Exception {
     this.region.close();
     region.getLog().closeAndDelete();
   }
@@ -161,6 +161,7 @@
   /**
    * Test scans using a DependentColumnFilter
    */
+  @Test
   public void testScans() throws Exception {
     Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
@@ -216,6 +217,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testFilterDropping() throws Exception {
     Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
     List<KeyValue> accepted = new ArrayList<KeyValue>();
Index: src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(working copy)
@@ -27,7 +27,6 @@
 import java.util.Arrays;
 import java.util.List;

-import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.*;
@@ -41,7 +40,13 @@
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;

 import com.google.common.base.Throwables;

@@ -49,9 +54,10 @@
  * Test filters at the HRegion doorstep.
  */
 @Category(SmallTests.class)
-public class TestFilter extends HBaseTestCase {
+public class TestFilter {
   private final static Log LOG = LogFactory.getLog(TestFilter.class);
   private HRegion region;
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

   //
   // Rows, Qualifiers, and Values are in two groups, One and Two.
@@ -116,10 +122,9 @@
   private long numRows = ROWS_ONE.length + ROWS_TWO.length;
   private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;

-
-  protected void setUp() throws Exception {
-    super.setUp();
-    HTableDescriptor htd = new HTableDescriptor(getName());
+  @Before
+  public void setUp() throws Exception {
+    HTableDescriptor htd = new HTableDescriptor("TestFilter");
     htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
     htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
     htd.addFamily(new HColumnDescriptor(FAMILIES_1[0]));
@@ -128,7 +133,8 @@
     htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[1]));
     htd.addFamily(new HColumnDescriptor(FAMILIES_1[1]));
     HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
-    this.region = HRegion.createHRegion(info, this.testDir, this.conf, htd);
+    this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
+        TEST_UTIL.getConfiguration(), htd);

     // Insert first half
     for(byte [] ROW : ROWS_ONE) {
@@ -200,14 +206,14 @@
     numRows -= 2;
   }

-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     HLog hlog = region.getLog();
     region.close();
     hlog.closeAndDelete();
-    super.tearDown();
   }

-
+  @Test
   public void testRegionScannerReseek() throws Exception {
     // create new rows and column family to show how reseek works..
     for (byte[] ROW : ROWS_THREE) {
@@ -274,6 +280,7 @@
     }
   }

+  @Test
   public void testNoFilter() throws Exception {
     // No filter
     long expectedRows = this.numRows;
@@ -289,6 +296,7 @@
     verifyScan(s, expectedRows, expectedKeys/2);
   }

+  @Test
   public void testPrefixFilter() throws Exception {
     // Grab rows from group one (half of total)
     long expectedRows = this.numRows / 2;
@@ -298,6 +306,7 @@
     verifyScan(s, expectedRows, expectedKeys);
   }

+  @Test
   public void testPageFilter() throws Exception {

     // KVs in first 6 rows
@@ -393,6 +402,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testWhileMatchFilterWithFilterRow() throws Exception {
     final int pageSize = 4;

@@ -407,13 +417,13 @@
       scannerCounter++;

       if (scannerCounter >= pageSize) {
-        Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
+        assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
       }
       if (!isMoreResults) {
         break;
       }
     }
-    Assert.assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter);
+    assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter);
   }

   /**
@@ -425,6 +435,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testWhileMatchFilterWithFilterRowKey() throws Exception {
     Scan s = new Scan();
     String prefix = "testRowOne";
@@ -436,7 +447,7 @@
       ArrayList<KeyValue> values = new ArrayList<KeyValue>();
       boolean isMoreResults = scanner.next(values);
       if (!isMoreResults || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) {
-        Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
+        assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
       }
       if (!isMoreResults) {
         break;
@@ -453,6 +464,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testWhileMatchFilterWithFilterKeyValue() throws Exception {
     Scan s = new Scan();
     WhileMatchFilter filter = new WhileMatchFilter(
@@ -464,13 +476,14 @@
     while (true) {
       ArrayList<KeyValue> values = new ArrayList<KeyValue>();
       boolean isMoreResults = scanner.next(values);
-      Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
+      assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
       if (!isMoreResults) {
         break;
       }
     }
   }

+  @Test
   public void testInclusiveStopFilter() throws IOException {

     // Grab rows from group one
@@ -505,6 +518,7 @@

   }

+  @Test
   public void testQualifierFilter() throws IOException {

     // Match two keys (one from each family) in half the rows
@@ -662,7 +676,8 @@

   }

-  public void testFamilyFilter() throws IOException {
+  @Test
+  public void testFamilyFilter() throws IOException {

     // Match family, only half of columns returned.
     long expectedRows = this.numRows;
@@ -796,6 +811,7 @@

   }

+  @Test
   public void testRowFilter() throws IOException {

     // Match a single row, all keys
@@ -942,6 +958,7 @@

   }

+  @Test
   public void testValueFilter() throws IOException {

     // Match group one rows
@@ -1065,6 +1082,7 @@
     verifyScanFull(s, kvs);
   }

+  @Test
   public void testSkipFilter() throws IOException {

     // Test for qualifier regex: "testQualifierOne-2"
@@ -1102,6 +1120,7 @@

   // TODO: This is important... need many more tests for ordering, etc
   // There are limited tests elsewhere but we need HRegion level ones here
+  @Test
   public void testFilterList() throws IOException {

     // Test getting a single row, single key using Row, Qualifier, and Value
@@ -1134,6 +1153,7 @@

   }

+  @Test
   public void testFirstKeyOnlyFilter() throws IOException {
     Scan s = new Scan();
     s.setFilter(new FirstKeyOnlyFilter());
@@ -1149,6 +1169,7 @@
     verifyScanFull(s, kvs);
   }

+  @Test
   public void testFilterListWithSingleColumnValueFilter() throws IOException {
     // Test for HBASE-3191
@@ -1225,6 +1246,7 @@
     verifyScanFull(s, kvs);
   }

+  @Test
   public void testSingleColumnValueFilter() throws IOException {

     // From HBASE-1821
@@ -1470,6 +1492,7 @@

   }

+  @Test
   public void testColumnPaginationFilter() throws Exception {

       // Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
@@ -1562,6 +1585,7 @@
       this.verifyScanFull(s, expectedKVs4);
   }

+  @Test
   public void testKeyOnlyFilter() throws Exception {

     // KVs in first 6 rows
Index: src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java	(working copy)
@@ -30,12 +30,16 @@
 import java.util.Arrays;
 import java.util.List;

-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNull;

 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /**
@@ -43,7 +47,7 @@
  *
  */
 @Category(SmallTests.class)
-public class TestFilterList extends TestCase {
+public class TestFilterList {
   static final int MAX_PAGES = 2;
   static final char FIRST_CHAR = 'a';
   static final char LAST_CHAR = 'e';
@@ -54,6 +58,7 @@
    * Test "must pass one"
    * @throws Exception
    */
+  @Test
   public void testMPONE() throws Exception {
     List<Filter> filters = new ArrayList<Filter>();
     filters.add(new PageFilter(MAX_PAGES));
@@ -113,6 +118,7 @@
    * Test "must pass all"
    * @throws Exception
    */
+  @Test
   public void testMPALL() throws Exception {
     List<Filter> filters = new ArrayList<Filter>();
     filters.add(new PageFilter(MAX_PAGES));
@@ -155,6 +161,7 @@
    * Test list ordering
    * @throws Exception
    */
+  @Test
   public void testOrdering() throws Exception {
     List<Filter> filters = new ArrayList<Filter>();
     filters.add(new PrefixFilter(Bytes.toBytes("yyy")));
@@ -211,6 +218,7 @@
    * Test serialization
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     List<Filter> filters = new ArrayList<Filter>();
     filters.add(new PageFilter(MAX_PAGES));
@@ -236,6 +244,7 @@
   /**
    * Test pass-thru of hints.
    */
+  @Test
   public void testHintPassThru() throws Exception {

     final KeyValue minKeyValue = new KeyValue(Bytes.toBytes(0L), null, null);
Index: src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java	(working copy)
@@ -27,23 +27,25 @@
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;

-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;

 /**
  * Tests the inclusive stop row filter
  */
 @Category(SmallTests.class)
-public class TestInclusiveStopFilter extends TestCase {
+public class TestInclusiveStopFilter {
   private final byte [] STOP_ROW = Bytes.toBytes("stop_row");
   private final byte [] GOOD_ROW = Bytes.toBytes("good_row");
   private final byte [] PAST_STOP_ROW = Bytes.toBytes("zzzzzz");

   Filter mainFilter;

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     mainFilter = new InclusiveStopFilter(STOP_ROW);
   }

@@ -51,6 +53,7 @@
    * Tests identification of the stop row
    * @throws Exception
    */
+  @Test
   public void testStopRowIdentification() throws Exception {
     stopRowTests(mainFilter);
   }
@@ -59,6 +62,7 @@
    * Tests serialization
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     // Decompose mainFilter to bytes.
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
Index: src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java	(working copy)
@@ -24,21 +24,24 @@
 import java.io.DataInputStream;
 import java.io.DataOutputStream;

-import junit.framework.TestCase;
 import org.apache.hadoop.hbase.SmallTests;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.*;
+
 /**
  * Tests for the page filter
  */
 @Category(SmallTests.class)
-public class TestPageFilter extends TestCase {
+public class TestPageFilter {
   static final int ROW_LIMIT = 3;

   /**
    * test page size filter
    * @throws Exception
    */
+  @Test
   public void testPageSize() throws Exception {
     Filter f = new PageFilter(ROW_LIMIT);
     pageSizeTests(f);
@@ -48,6 +51,7 @@
    * Test filter serialization
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     Filter f = new PageFilter(ROW_LIMIT);
     // Decompose mainFilter to bytes.
Index: src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java	(working copy)
@@ -20,10 +20,11 @@
 package org.apache.hadoop.hbase.filter;

-import junit.framework.TestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 import java.io.ByteArrayInputStream;
@@ -32,8 +33,10 @@
 import java.io.DataOutputStream;
 import java.io.UnsupportedEncodingException;

+import static org.junit.Assert.*;
+
 @Category(SmallTests.class)
-public class TestPrefixFilter extends TestCase {
+public class TestPrefixFilter {
   Filter mainFilter;
   static final char FIRST_CHAR = 'a';
   static final char LAST_CHAR = 'e';
@@ -48,19 +51,22 @@
     }
   }

-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX));
   }

+  @Test
   public void testPrefixOnRow() throws Exception {
     prefixRowTests(mainFilter);
   }

+  @Test
   public void testPrefixOnRowInsideWhileMatchRow() throws Exception {
     prefixRowTests(new WhileMatchFilter(this.mainFilter), true);
   }

+  @Test
   public void testSerialization() throws Exception {
     // Decompose mainFilter to bytes.
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
Index: src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java	(working copy)
@@ -25,19 +25,20 @@
 import java.io.DataInputStream;
 import java.io.DataOutputStream;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.*;
+
 @Category(SmallTests.class)
-public class TestRandomRowFilter extends TestCase {
+public class TestRandomRowFilter {
   protected RandomRowFilter quarterChanceFilter;

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     quarterChanceFilter = new RandomRowFilter(0.25f);
   }

@@ -46,6 +47,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testBasics() throws Exception {
     int included = 0;
     int max = 1000000;
@@ -68,6 +70,7 @@
    *
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     RandomRowFilter newFilter = serializationTest(quarterChanceFilter);
     // use epsilon float comparison
Index: src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java	(working copy)
@@ -19,24 +19,23 @@
  */
 package org.apache.hadoop.hbase.filter;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.*;
+
 /**
  * Tests for {@link SingleColumnValueExcludeFilter}. Because this filter
  * extends {@link SingleColumnValueFilter}, only the added functionality is
  * tested. That is, method filterKeyValue(KeyValue).
  *
- * @author ferdy
- *
  */
 @Category(SmallTests.class)
-public class TestSingleColumnValueExcludeFilter extends TestCase {
+public class TestSingleColumnValueExcludeFilter {
   private static final byte[] ROW = Bytes.toBytes("test");
   private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
   private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo");
@@ -48,6 +47,7 @@
    * Test the overridden functionality of filterKeyValue(KeyValue)
    * @throws Exception
    */
+  @Test
   public void testFilterKeyValue() throws Exception {
     Filter filter = new SingleColumnValueExcludeFilter(COLUMN_FAMILY,
         COLUMN_QUALIFIER, CompareOp.EQUAL, VAL_1);
Index: src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java	(revision 1341948)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java	(working copy)
@@ -30,14 +30,17 @@
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;

-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.*;
+
 /**
  * Tests the value filter
  */
 @Category(SmallTests.class)
-public class TestSingleColumnValueFilter extends TestCase {
+public class TestSingleColumnValueFilter {
   private static final byte[] ROW = Bytes.toBytes("test");
   private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
   private static final byte [] COLUMN_QUALIFIER = Bytes.toBytes("foo");
@@ -58,9 +61,8 @@
   Filter regexFilter;
   Filter regexPatternFilter;

-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     basicFilter = basicFilterNew();
     substrFilter = substrFilterNew();
     regexFilter = regexFilterNew();
@@ -172,6 +174,7 @@
    * Tests identification of the stop row
    * @throws Exception
    */
+  @Test
   public void testStop() throws Exception {
     basicFilterTests((SingleColumnValueFilter)basicFilter);
     substrFilterTests(substrFilter);
@@ -183,6 +186,7 @@
    * Tests serialization
    * @throws Exception
    */
+  @Test
   public void testSerialization() throws Exception {
     Filter newFilter = serializationTest(basicFilter);
     basicFilterTests((SingleColumnValueFilter)newFilter);
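
Every file above follows the same JUnit 3 to JUnit 4 conversion: drop the junit.framework.TestCase superclass, turn the overridden protected setUp()/tearDown() into public methods annotated @Before/@After (with no super calls), mark each test method with @Test since JUnit 4 discovers tests by annotation rather than the test* naming convention, and statically import assertions from org.junit.Assert instead of inheriting them. A minimal sketch of the before/after shape, using a made-up TestExample class rather than anything from this patch:

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class TestExample {                // JUnit 4: no TestCase superclass
  private StringBuilder fixture;          // stands in for the filter/region fixtures above

  @Before                                 // replaces "protected void setUp()" + super.setUp()
  public void setUp() {
    fixture = new StringBuilder("row");
  }

  @After                                  // replaces "protected void tearDown()" + super.tearDown()
  public void tearDown() {
    fixture = null;
  }

  @Test                                   // without this annotation the runner skips the method
  public void testAppend() {
    assertEquals("row_1", fixture.append("_1").toString());
  }
}

One behavioral difference worth keeping in mind: once a class no longer extends TestCase, a test method that is missing its @Test annotation compiles but silently never runs, which is why the annotation is added to every test method in this patch.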