diff --git cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java index 4a2bae5c7c..836667f96f 100644 --- cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java +++ cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java @@ -43,7 +43,7 @@ import jline.console.ConsoleReader; import jline.console.completer.ArgumentCompleter; import jline.console.completer.Completer; -import junit.framework.TestCase; + import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; @@ -56,29 +56,39 @@ import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.Test; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.After; // Cannot call class TestCliDriver since that's the name of the generated // code for the script-based testing -public class TestCliDriverMethods extends TestCase { +/** + * TestCliDriverMethods. + */ +public class TestCliDriverMethods { SecurityManager securityManager; // Some of these tests require intercepting System.exit() using the SecurityManager. // It is safer to register/unregister our SecurityManager during setup/teardown instead // of doing it within the individual test cases. - @Override + @Before public void setUp() { securityManager = System.getSecurityManager(); System.setSecurityManager(new NoExitSecurityManager(securityManager)); } - @Override + @After public void tearDown() { System.setSecurityManager(securityManager); } // If the command has an associated schema, make sure it gets printed to use + @Test public void testThatCliDriverPrintsHeaderForCommandsWithSchema() { Schema mockSchema = mock(Schema.class); List fieldSchemas = new ArrayList(); @@ -93,6 +103,7 @@ public void testThatCliDriverPrintsHeaderForCommandsWithSchema() { } // If the command has no schema, make sure nothing is printed + @Test public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() { Schema mockSchema = mock(Schema.class); when(mockSchema.getFieldSchemas()).thenReturn(null); @@ -103,6 +114,7 @@ public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() { } // Test that CliDriver does not strip comments starting with '--' + @Test public void testThatCliDriverDoesNotStripComments() throws Exception { // We need to overwrite System.out and System.err as that is what is used in ShellCmdExecutor // So save old values... 
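The conversion applied to TestCliDriverMethods above, and repeated in every file below, is the standard JUnit 3 to JUnit 4 migration: drop the junit.framework.TestCase superclass, replace the setUp()/tearDown() overrides with @Before/@After methods, mark each test method with @Test, and pull assertions in as static imports from org.junit.Assert. A minimal before/after sketch of the pattern (the TestWidget class here is hypothetical, not a file touched by this patch):

    // Before: JUnit 3 discovers tests by inheritance and the test* naming convention.
    import junit.framework.TestCase;

    public class TestWidget extends TestCase {
      private StringBuilder state;

      @Override
      protected void setUp() throws Exception {
        state = new StringBuilder("ready");     // runs before every test* method
      }

      public void testState() {
        assertEquals("ready", state.toString()); // assertEquals inherited from TestCase
      }
    }

    // After: JUnit 4 uses annotations; lifecycle methods must be public.
    import static org.junit.Assert.assertEquals;

    import org.junit.Before;
    import org.junit.Test;

    public class TestWidget {
      private StringBuilder state;

      @Before
      public void setUp() {                      // no @Override; any public void no-arg method
        state = new StringBuilder("ready");
      }

      @Test
      public void testState() {
        assertEquals("ready", state.toString());
      }
    }

One behavioral detail worth noting: JUnit 4 requires @Test methods to be non-static instance methods, which is why the patch also changes testCreate() and testCommand() from static to instance methods in the webhcat java-client tests further down.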
@@ -185,6 +197,7 @@ private PrintStream headerPrintingTestDriver(Schema mockSchema) { } + @Test public void testGetCommandCompletor() { Completer[] completors = CliDriver.getCommandCompleter(); assertEquals(2, completors.length); @@ -205,6 +218,7 @@ public void testGetCommandCompletor() { } + @Test public void testRun() throws Exception { // clean history String historyDirectory = System.getProperty("user.home"); @@ -243,6 +257,7 @@ public void testRun() throws Exception { /** * Test commands exit and quit */ + @Test public void testQuit() throws Exception { CliSessionState ss = new CliSessionState(new HiveConf()); @@ -273,6 +288,7 @@ public void testQuit() throws Exception { } + @Test public void testProcessSelectDatabase() throws Exception { CliSessionState sessinState = new CliSessionState(new HiveConf()); CliSessionState.start(sessinState); @@ -293,6 +309,7 @@ public void testProcessSelectDatabase() throws Exception { "FAILED: ParseException line 1:4 cannot recognize input near 'database'")); } + @Test public void testprocessInitFiles() throws Exception { String oldHiveHome = System.getenv("HIVE_HOME"); String oldHiveConfDir = System.getenv("HIVE_CONF_DIR"); diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java index 941e9e3560..fd7327494d 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java @@ -20,9 +20,15 @@ import java.util.Random; -import junit.framework.TestCase; -public class TestHiveBaseChar extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; + +/** + * HiveBaseChar Test. + */ +public class TestHiveBaseChar { static Random rnd = new Random(); public static int getRandomSupplementaryChar() { @@ -62,6 +68,7 @@ public static String createRandomSupplementaryCharString(int len) { return sb.toString(); } + @Test public void testStringLength() throws Exception { int strLen = 20; int[] lengths = { 15, 20, 25 }; @@ -83,6 +90,7 @@ public void testStringLength() throws Exception { assertNull(HiveBaseChar.enforceMaxLength(null, 0)); } + @Test public void testGetPaddedValue() { int strLen = 20; int[] lengths = { 15, 20, 25 }; diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java index bb23882c3b..b2f1bb3abc 100644 --- common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java @@ -24,11 +24,14 @@ import org.apache.logging.log4j.core.impl.Log4jContextFactory; import org.apache.logging.log4j.core.selector.ClassLoaderContextSelector; import org.apache.logging.log4j.core.selector.ContextSelector; -import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestHiveAsyncLogging extends TestCase { +/** + * HiveAsyncLogging Test. 
+ */ +public class TestHiveAsyncLogging { // this test requires disruptor jar in classpath @Test diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java index eb2f9647a2..fd41c5bb98 100644 --- common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java @@ -17,18 +17,24 @@ */ package org.apache.hadoop.hive.conf; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.Before; import org.junit.Test; -public class TestHiveConfRestrictList extends TestCase { +/** + * HiveConfRestrictList Test. + */ +public class TestHiveConfRestrictList { private HiveConf conf = null; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + System.setProperty(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname, ConfVars.HIVETESTMODEPREFIX.varname); conf = new HiveConf(); diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java index f9c3283f04..ab565051ae 100644 --- common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java @@ -23,7 +23,11 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.common.util.HiveTestUtils; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * TestHiveLogging @@ -31,7 +35,7 @@ * Test cases for HiveLogging, which is initialized in HiveConf. * Loads configuration files located in common/src/test/resources. */ -public class TestHiveLogging extends TestCase { +public class TestHiveLogging { public TestHiveLogging() { super(); } @@ -74,6 +78,7 @@ private void RunTest(File logFile, assertTrue(logFile + " should exist", logFile.exists()); } + @Test public void testHiveLogging() throws Exception { // customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log File customLogPath = new File(new File(System.getProperty("test.tmp.dir")), diff --git contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java index 5bc06e8445..ffffda5a87 100644 --- contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java +++ contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java @@ -22,15 +22,19 @@ import java.util.Iterator; import java.util.NoSuchElementException; -import junit.framework.TestCase; + import org.apache.hadoop.util.Shell; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestGenericMR. 
* */ -public final class TestGenericMR extends TestCase { +public final class TestGenericMR { + @Test public void testReduceTooFar() throws Exception { try { new GenericMR().reduce(new StringReader("a\tb\tc"), new StringWriter(), @@ -50,6 +54,7 @@ public void reduce(String key, Iterator records, fail("Expected NoSuchElementException"); } + @Test public void testEmptyMap() throws Exception { final StringWriter out = new StringWriter(); @@ -58,6 +63,7 @@ public void testEmptyMap() throws Exception { assertEquals(0, out.toString().length()); } + @Test public void testIdentityMap() throws Exception { final String in = "a\tb\nc\td"; final StringWriter out = new StringWriter(); @@ -66,6 +72,7 @@ public void testIdentityMap() throws Exception { assertEquals(in + "\n", out.toString()); } + @Test public void testKVSplitMap() throws Exception { final String in = "k1=v1,k2=v2\nk1=v2,k2=v3"; final String expected = "k1\tv1\nk2\tv2\nk1\tv2\nk2\tv3\n"; @@ -83,6 +90,7 @@ public void map(String[] record, Output output) throws Exception { assertEquals(expected, out.toString()); } + @Test public void testIdentityReduce() throws Exception { final String in = "a\tb\nc\td"; final StringWriter out = new StringWriter(); @@ -92,6 +100,7 @@ public void testIdentityReduce() throws Exception { assertEquals(in + "\n", out.toString()); } + @Test public void testWordCountReduce() throws Exception { final String in = "hello\t1\nhello\t2\nokay\t4\nokay\t6\nokay\t2"; final StringWriter out = new StringWriter(); diff --git contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java index 06d8aa5a34..2bca0a15b6 100644 --- contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java +++ contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java @@ -19,7 +19,7 @@ import java.util.Properties; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; @@ -29,12 +29,14 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestRegexSerDe. * */ -public class TestRegexSerDe extends TestCase { +public class TestRegexSerDe { private AbstractSerDe createSerDe(String fieldNames, String fieldTypes, String inputRegex, String outputFormatString) throws Throwable { @@ -52,6 +54,7 @@ private AbstractSerDe createSerDe(String fieldNames, String fieldTypes, /** * Test the LazySimpleSerDe class. */ + @Test public void testRegexSerDe() throws Throwable { try { // Create the SerDe diff --git druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java index 42bde3583e..b125f70307 100644 --- druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java +++ druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java @@ -28,12 +28,14 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import io.druid.query.Query; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * Test Class. 
*/ -@SuppressWarnings("SameParameterValue") public class TestHiveDruidQueryBasedInputFormat extends TestCase { +@SuppressWarnings("SameParameterValue") public class TestHiveDruidQueryBasedInputFormat { private static final String TIMESERIES_QUERY = @@ -180,6 +182,7 @@ + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":5,\"fromNext\":false}," + "\"context\":{\"druid.query.fetch\":true,\"queryId\":\"\"}}, [localhost:8082]}]"; + @Test public void testTimeZone() throws Exception { DruidQueryBasedInputFormat input = new DruidQueryBasedInputFormat(); diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java index de1e85817d..014d081c0a 100644 --- hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java +++ hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java @@ -28,7 +28,7 @@ import java.util.Properties; import junit.framework.Assert; -import junit.framework.TestCase; + import org.apache.avro.Schema; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; @@ -71,11 +71,16 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.thrift.TException; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertNull; +import org.junit.Test; /** * Tests the HBaseSerDe class. */ -public class TestHBaseSerDe extends TestCase { +public class TestHBaseSerDe { static final byte[] TEST_BYTE_ARRAY = Bytes.toBytes("test"); @@ -161,6 +166,7 @@ /** * Test the default behavior of the Lazy family of objects and object inspectors. */ + @Test public void testHBaseSerDeI() throws SerDeException { byte [] cfa = "cola".getBytes(); @@ -246,6 +252,7 @@ public void testHBaseSerDeI() throws SerDeException { deserializeAndSerialize(serDe, r, p, expectedFieldsData); } + @Test public void testHBaseSerDeWithTimestamp() throws SerDeException { // Create the SerDe HBaseSerDe serDe = new HBaseSerDe(); @@ -401,6 +408,7 @@ private Properties createPropertiesI_IV() { return tbl; } + @Test public void testHBaseSerDeII() throws SerDeException { byte [] cfa = "cfa".getBytes(); @@ -526,6 +534,7 @@ private Properties createPropertiesII_III() { return tbl; } + @Test public void testHBaseSerDeWithHiveMapToHBaseColumnFamily() throws SerDeException { byte [] cfint = "cf-int".getBytes(); @@ -680,6 +689,7 @@ private Properties createPropertiesForHiveMapHBaseColumnFamilyII() { return tbl; } + @Test public void testHBaseSerDeWithHiveMapToHBaseColumnFamilyII() throws SerDeException { byte [] cfbyte = "cf-byte".getBytes(); @@ -800,6 +810,7 @@ private void deserializeAndSerializeHiveMapHBaseColumnFamilyII( assertEquals("Serialized data: ", p.toString(), serializedPut.toString()); } + @Test public void testHBaseSerDeWithColumnPrefixes() throws Exception { byte[] cfa = "cola".getBytes(); @@ -919,6 +930,7 @@ private void deserializeAndSerializeHivePrefixColumnFamily(HBaseSerDe serDe, Res } } + @Test public void testHBaseSerDeCompositeKeyWithSeparator() throws SerDeException, TException, IOException { byte[] cfa = "cola".getBytes(); @@ -967,6 +979,7 @@ private Properties createPropertiesForCompositeKeyWithSeparator() { return tbl; } + @Test public void testHBaseSerDeCompositeKeyWithoutSeparator() throws SerDeException, TException, IOException { byte[] cfa = "cola".getBytes(); @@ -1041,6 +1054,7 @@ private void 
deserializeAndSerializeHBaseCompositeKey(HBaseSerDe serDe, Result r assertEquals("Serialized put:", p.toString(), put.toString()); } + @Test public void testHBaseSerDeWithAvroSchemaInline() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1086,6 +1100,7 @@ private Properties createPropertiesForHiveAvroSchemaInline() { return tbl; } + @Test public void testHBaseSerDeWithForwardEvolvedSchema() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1132,6 +1147,7 @@ private Properties createPropertiesForHiveAvroForwardEvolvedSchema() { return tbl; } + @Test public void testHBaseSerDeWithBackwardEvolvedSchema() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1177,6 +1193,7 @@ private Properties createPropertiesForHiveAvroBackwardEvolvedSchema() { return tbl; } + @Test public void testHBaseSerDeWithAvroSerClass() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1228,6 +1245,7 @@ private Properties createPropertiesForHiveAvroSerClass() { return tbl; } + @Test public void testHBaseSerDeWithAvroSchemaUrl() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1292,6 +1310,7 @@ private Properties createPropertiesForHiveAvroSchemaUrl(String schemaUrl) { return tbl; } + @Test public void testHBaseSerDeWithAvroExternalSchema() throws SerDeException, IOException { byte[] cfa = "cola".getBytes(); @@ -1345,6 +1364,7 @@ private Properties createPropertiesForHiveAvroExternalSchema() { return tbl; } + @Test public void testHBaseSerDeWithHiveMapToHBaseAvroColumnFamily() throws Exception { byte[] cfa = "cola".getBytes(); @@ -1420,6 +1440,7 @@ private Properties createPropertiesForHiveAvroColumnFamilyMap() { return tbl; } + @Test public void testHBaseSerDeCustomStructValue() throws IOException, SerDeException { byte[] cfa = "cola".getBytes(); @@ -1458,6 +1479,7 @@ public void testHBaseSerDeCustomStructValue() throws IOException, SerDeException * and not the error in a production setup. The Properties.java object that is passed to the serDe * initializer, is passed with empty value "" for "columns.comments" key for hbase backed tables. */ + @Test public void testEmptyColumnComment() throws SerDeException { HBaseSerDe serDe = new HBaseSerDe(); Properties tbl = createPropertiesForValueStruct(); diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java index e0cf162f4f..0e4b566de3 100644 --- hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java +++ hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java @@ -23,7 +23,7 @@ import java.util.Collections; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; @@ -51,15 +51,20 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; +import org.junit.Test; /** * TestLazyHBaseObject is a test for the LazyHBaseXXX classes. */ -public class TestLazyHBaseObject extends TestCase { +public class TestLazyHBaseObject { /** * Test the LazyMap class with Integer-to-String. 
   * @throws SerDeException
   */
+  @Test
   public void testLazyHBaseCellMap1() throws SerDeException {
     // Map of Integer to String
     Text nullSequence = new Text("\\N");
@@ -122,6 +127,7 @@ public void testLazyHBaseCellMap1() throws SerDeException {
    * Test the LazyMap class with String-to-String.
    * @throws SerDeException
    */
+  @Test
   public void testLazyHBaseCellMap2() throws SerDeException {
     // Map of String to String
     Text nullSequence = new Text("\\N");
@@ -185,128 +191,163 @@ public void testLazyHBaseCellMap2() throws SerDeException {
    * map are stored in binary format using the appropriate LazyPrimitive objects.
    * @throws SerDeException
    */
+  @Test
   public void testLazyHBaseCellMap3() throws SerDeException {
     Text nullSequence = new Text("\\N");
-    TypeInfo mapBinaryIntKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<int,int>");
-    ObjectInspector oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryIntKeyValue, new byte [] {(byte)1, (byte) 2}, 0, nullSequence, false, (byte) 0);
-    LazyHBaseCellMap hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-
     List<KeyValue> kvs = new ArrayList<KeyValue>();
     byte [] rowKey = "row-key".getBytes();
     byte [] cfInt = "cf-int".getBytes();
-    kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(1), Bytes.toBytes(1)));
-    Result result = Result.create(kvs);
-    List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
-    mapBinaryStorage.add(true);
-    mapBinaryStorage.add(true);
-    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
-    IntWritable expectedIntValue = new IntWritable(1);
-    LazyPrimitive<?, ?> lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
-    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
+    List<Boolean> mapBinaryStorage = mapBinInt(nullSequence, kvs, rowKey, cfInt);
+
+    mapBinByte(nullSequence, kvs, rowKey, mapBinaryStorage);
+
+    mapBinShort(nullSequence, kvs, rowKey, mapBinaryStorage);
+    mapBinLong(nullSequence, kvs, rowKey, mapBinaryStorage);
+
+    mapBinFloat(nullSequence, kvs, rowKey, mapBinaryStorage);
+
+    mapBinDouble(nullSequence, kvs, rowKey, mapBinaryStorage);
+
+    mapBinBoolean(nullSequence, kvs, rowKey, mapBinaryStorage);
+  }
+
+private void mapBinBoolean(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryBooleanKeyValue =
+        TypeInfoUtils.getTypeInfoFromTypeString("map<boolean,boolean>");
+    oi = LazyFactory.createLazyObjectInspector(
+        mapBinaryBooleanKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
+        (byte) 0);
+    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
+    byte [] cfBoolean = "cf-boolean".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(
-        rowKey, cfInt, Bytes.toBytes(Integer.MIN_VALUE), Bytes.toBytes(Integer.MIN_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(false), Bytes.toBytes(false)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
-    expectedIntValue = new IntWritable(Integer.MIN_VALUE);
+    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
+    BooleanWritable expectedBooleanValue = new BooleanWritable(false);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
-    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(
-        rowKey, cfInt, Bytes.toBytes(Integer.MAX_VALUE), Bytes.toBytes(Integer.MAX_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(true), Bytes.toBytes(true)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
-    expectedIntValue = new IntWritable(Integer.MAX_VALUE);
+    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
+    expectedBooleanValue = new BooleanWritable(true);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
-    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
+}
-    TypeInfo mapBinaryByteKeyValue =
-        TypeInfoUtils.getTypeInfoFromTypeString("map<tinyint,tinyint>");
+private void mapBinDouble(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryDoubleKeyValue =
+        TypeInfoUtils.getTypeInfoFromTypeString("map<double,double>");
     oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryByteKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
+        mapBinaryDoubleKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
+        (byte) 0);
     hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-    byte [] cfByte = "cf-byte".getBytes();
+    byte [] cfDouble = "cf-double".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {(byte) 1}, new byte [] {(byte) 1}));
+    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(1.0), Bytes.toBytes(1.0)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
-    ByteWritable expectedByteValue = new ByteWritable((byte) 1);
+    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
+    DoubleWritable expectedDoubleValue = new DoubleWritable(1.0);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
-    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MIN_VALUE},
-        new byte [] {Byte.MIN_VALUE}));
+    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MIN_VALUE),
+        Bytes.toBytes(Double.MIN_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
-    expectedByteValue = new ByteWritable(Byte.MIN_VALUE);
+    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
+    expectedDoubleValue = new DoubleWritable(Double.MIN_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
-    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MAX_VALUE},
-        new byte [] {Byte.MAX_VALUE}));
+    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MAX_VALUE),
+        Bytes.toBytes(Double.MAX_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
-    expectedByteValue = new ByteWritable(Byte.MAX_VALUE);
+    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
+    expectedDoubleValue = new DoubleWritable(Double.MAX_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
-    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
+}
-    TypeInfo mapBinaryShortKeyValue =
-        TypeInfoUtils.getTypeInfoFromTypeString("map<smallint,smallint>");
+private void mapBinFloat(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryFloatKeyValue =
+        TypeInfoUtils.getTypeInfoFromTypeString("map<float,float>");
     oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryShortKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
+        mapBinaryFloatKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
+        (byte) 0);
     hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-    byte [] cfShort = "cf-short".getBytes();
+    byte [] cfFloat = "cf-float".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes((short) 1), Bytes.toBytes((short) 1)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) 1.0F),
+        Bytes.toBytes((float) 1.0F)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
-    ShortWritable expectedShortValue = new ShortWritable((short) 1);
+    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
+    FloatWritable expectedFloatValue = new FloatWritable(1.0F);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
-    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MIN_VALUE),
-        Bytes.toBytes(Short.MIN_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MIN_VALUE),
+        Bytes.toBytes((float) Float.MIN_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
-    expectedShortValue = new ShortWritable(Short.MIN_VALUE);
+    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
+    expectedFloatValue = new FloatWritable(Float.MIN_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
-    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MAX_VALUE),
-        Bytes.toBytes(Short.MAX_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MAX_VALUE),
+        Bytes.toBytes((float) Float.MAX_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
-    expectedShortValue = new ShortWritable(Short.MAX_VALUE);
+    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
+    expectedFloatValue = new FloatWritable(Float.MAX_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
-    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
+}
-    TypeInfo mapBinaryLongKeyValue =
+private void mapBinLong(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryLongKeyValue =
         TypeInfoUtils.getTypeInfoFromTypeString("map<bigint,bigint>");
     oi = LazyFactory.createLazyObjectInspector(
         mapBinaryLongKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
@@ -343,119 +384,146 @@ public void testLazyHBaseCellMap3() throws SerDeException {
         (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
     assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
+}
-    TypeInfo mapBinaryFloatKeyValue =
-        TypeInfoUtils.getTypeInfoFromTypeString("map<float,float>");
+private void mapBinShort(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryShortKeyValue =
+        TypeInfoUtils.getTypeInfoFromTypeString("map<smallint,smallint>");
     oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryFloatKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
-        (byte) 0);
+        mapBinaryShortKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
     hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-    byte [] cfFloat = "cf-float".getBytes();
+    byte [] cfShort = "cf-short".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) 1.0F),
-        Bytes.toBytes((float) 1.0F)));
+    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes((short) 1), Bytes.toBytes((short) 1)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
-    FloatWritable expectedFloatValue = new FloatWritable(1.0F);
+    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
+    ShortWritable expectedShortValue = new ShortWritable((short) 1);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
-    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MIN_VALUE),
-        Bytes.toBytes((float) Float.MIN_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MIN_VALUE),
+        Bytes.toBytes(Short.MIN_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
-    expectedFloatValue = new FloatWritable(Float.MIN_VALUE);
+    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
+    expectedShortValue = new ShortWritable(Short.MIN_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
-    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MAX_VALUE),
-        Bytes.toBytes((float) Float.MAX_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MAX_VALUE),
+        Bytes.toBytes(Short.MAX_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
-    expectedFloatValue = new FloatWritable(Float.MAX_VALUE);
+    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
+    expectedShortValue = new ShortWritable(Short.MAX_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
-    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
+}
-    TypeInfo mapBinaryDoubleKeyValue =
-        TypeInfoUtils.getTypeInfoFromTypeString("map<double,double>");
+private void mapBinByte(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, List<Boolean> mapBinaryStorage)
+    throws SerDeException {
+    ObjectInspector oi;
+    LazyHBaseCellMap hbaseCellMap;
+    Result result;
+    LazyPrimitive<?, ?> lazyPrimitive;
+    TypeInfo mapBinaryByteKeyValue =
+        TypeInfoUtils.getTypeInfoFromTypeString("map<tinyint,tinyint>");
    oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryDoubleKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
-        (byte) 0);
+        mapBinaryByteKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
     hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-    byte [] cfDouble = "cf-double".getBytes();
+    byte [] cfByte = "cf-byte".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(1.0), Bytes.toBytes(1.0)));
+    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {(byte) 1}, new byte [] {(byte) 1}));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
-    DoubleWritable expectedDoubleValue = new DoubleWritable(1.0);
+    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
+    ByteWritable expectedByteValue = new ByteWritable((byte) 1);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
-    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MIN_VALUE),
-        Bytes.toBytes(Double.MIN_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MIN_VALUE},
+        new byte [] {Byte.MIN_VALUE}));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
-    expectedDoubleValue = new DoubleWritable(Double.MIN_VALUE);
+    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
+    expectedByteValue = new ByteWritable(Byte.MIN_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
-    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MAX_VALUE),
-        Bytes.toBytes(Double.MAX_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MAX_VALUE},
+        new byte [] {Byte.MAX_VALUE}));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
-    expectedDoubleValue = new DoubleWritable(Double.MAX_VALUE);
+    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
+    expectedByteValue = new ByteWritable(Byte.MAX_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
-    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
+}
+
+private List<Boolean> mapBinInt(Text nullSequence, List<KeyValue> kvs, byte[] rowKey, byte[] cfInt) throws SerDeException {
+    TypeInfo mapBinaryIntKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<int,int>");
+    ObjectInspector oi = LazyFactory.createLazyObjectInspector(
+        mapBinaryIntKeyValue, new byte [] {(byte)1, (byte) 2}, 0, nullSequence, false, (byte) 0);
+    LazyHBaseCellMap hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
+
+    kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(1), Bytes.toBytes(1)));
+    Result result = Result.create(kvs);
+    List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
+    mapBinaryStorage.add(true);
+    mapBinaryStorage.add(true);
+    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
+    IntWritable expectedIntValue = new IntWritable(1);
+    LazyPrimitive<?, ?> lazyPrimitive =
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
+
+    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
-    TypeInfo mapBinaryBooleanKeyValue =
-        TypeInfoUtils.getTypeInfoFromTypeString("map<boolean,boolean>");
-    oi = LazyFactory.createLazyObjectInspector(
-        mapBinaryBooleanKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
-        (byte) 0);
-    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
-    byte [] cfBoolean = "cf-boolean".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(false), Bytes.toBytes(false)));
+    kvs.add(new KeyValue(
+        rowKey, cfInt, Bytes.toBytes(Integer.MIN_VALUE), Bytes.toBytes(Integer.MIN_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
-    BooleanWritable expectedBooleanValue = new BooleanWritable(false);
+    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
+    expectedIntValue = new IntWritable(Integer.MIN_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
-    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
+    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(true), Bytes.toBytes(true)));
+    kvs.add(new KeyValue(
+        rowKey, cfInt, Bytes.toBytes(Integer.MAX_VALUE), Bytes.toBytes(Integer.MAX_VALUE)));
     result = Result.create(kvs);
-    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
-    expectedBooleanValue = new BooleanWritable(true);
+    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
+    expectedIntValue = new IntWritable(Integer.MAX_VALUE);
     lazyPrimitive =
-        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
+        (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
-    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
-  }
+    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
+    return mapBinaryStorage;
+}

  /**
   * Test the LazyHBaseRow class with one-for-one mappings between
   * Hive fields and HBase columns.
   * @throws SerDeException
   */
+  @Test
   public void testLazyHBaseRow1() throws SerDeException {
     List<TypeInfo> fieldTypeInfos =
         TypeInfoUtils.getTypeInfosFromTypeString(
@@ -578,6 +646,7 @@ public void testLazyHBaseRow1() throws SerDeException {
    * an HBase column family.
    * @throws SerDeException
    */
+  @Test
   public void testLazyHBaseRow2() throws SerDeException {
     // column family is mapped to Map<string,string>
     List<TypeInfo> fieldTypeInfos =
@@ -700,6 +769,7 @@ public void testLazyHBaseRow2() throws SerDeException {
    * are stored in binary format in HBase.
* @throws SerDeException */ + @Test public void testLazyHBaseRow3() throws SerDeException { List fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString( diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java index 3cf172b5ea..2b57d8d8ae 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java @@ -59,9 +59,19 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import junit.framework.TestCase; -public class TestPermsGrp extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; + +/** + * TestPermsGrp. + */ +public class TestPermsGrp { private boolean isServerRunning = false; private HiveConf hcatConf; @@ -69,13 +79,13 @@ private HiveMetaStoreClient msc; private static final Logger LOG = LoggerFactory.getLogger(TestPermsGrp.class); - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { System.setSecurityManager(securityManager); } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { if (isServerRunning) { return; @@ -104,6 +114,7 @@ protected void setUp() throws Exception { msc = new HiveMetaStoreClient(hcatConf); } + @Test public void testCustomPerms() throws Exception { String dbName = Warehouse.DEFAULT_DATABASE_NAME; diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java index a8aafb1474..c4896feb6d 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java @@ -21,7 +21,7 @@ import java.io.File; import java.io.IOException; -import junit.framework.TestCase; + import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; @@ -31,14 +31,21 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Before; +import org.junit.Test; /* Unit test for GitHub Howl issue #3 */ -public class TestUseDatabase extends TestCase { +/** + * TestUseDatabase. 
+ */ +public class TestUseDatabase { private IDriver hcatDriver; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { HiveConf hcatConf = new HiveConf(this.getClass()); hcatConf.set(ConfVars.PREEXECHOOKS.varname, ""); @@ -54,6 +61,7 @@ protected void setUp() throws Exception { private final String dbName = "testUseDatabase_db"; private final String tblName = "testUseDatabase_tbl"; + @Test public void testAlterTablePass() throws Exception { hcatDriver.run("create database " + dbName); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java index b9d3f642dd..0bb341ede1 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java @@ -45,15 +45,20 @@ import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils; import junit.framework.Assert; -import junit.framework.TestCase; + import org.apache.pig.parser.AliasMasker; +import org.junit.Test; -public class TestDefaultHCatRecord extends TestCase { +/** + * TestDefaultHCatRecord. + */ +public class TestDefaultHCatRecord { /** * test that we properly serialize/deserialize HCatRecordS * @throws IOException */ + @Test public void testRYW() throws IOException { File f = new File("binary.dat"); @@ -87,12 +92,14 @@ public void testRYW() throws IOException { } + @Test public void testCompareTo() { HCatRecord[] recs = getHCatRecords(); Assert.assertTrue(HCatDataCheckUtil.compareRecords(recs[0], recs[1]) == 0); Assert.assertTrue(HCatDataCheckUtil.compareRecords(recs[4], recs[5]) == 0); } + @Test public void testEqualsObject() { HCatRecord[] recs = getHCatRecords(); @@ -104,6 +111,7 @@ public void testEqualsObject() { * Test get and set calls with type * @throws HCatException */ + @Test public void testGetSetByType1() throws HCatException { HCatRecord inpRec = getHCatRecords()[0]; HCatRecord newRec = new DefaultHCatRecord(inpRec.size()); @@ -132,6 +140,7 @@ public void testGetSetByType1() throws HCatException { * Test get and set calls with type * @throws HCatException */ + @Test public void testGetSetByType2() throws HCatException { HCatRecord inpRec = getGetSet2InpRec(); @@ -152,6 +161,7 @@ public void testGetSetByType2() throws HCatException { * Test type specific get/set methods on HCatRecord types added in Hive 13 * @throws HCatException */ + @Test public void testGetSetByType3() throws HCatException { HCatRecord inpRec = getHCat13TypesRecord(); HCatRecord newRec = new DefaultHCatRecord(inpRec.size()); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java index 59715906c9..2c3c89b815 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java @@ -26,7 +26,7 @@ import java.util.Properties; import junit.framework.Assert; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; @@ -35,8 +35,12 @@ import org.apache.hadoop.io.Writable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.junit.Test; -public class TestHCatRecordSerDe extends TestCase { +/** + * TestHCatRecordSerDe. 
+ */ +public class TestHCatRecordSerDe { private static final Logger LOG = LoggerFactory.getLogger(TestHCatRecordSerDe.class); @@ -115,6 +119,7 @@ return data; } + @Test public void testRW() throws Exception { Configuration conf = new Configuration(); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java index 00cc1eeef7..583027fd34 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java @@ -41,9 +41,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import junit.framework.TestCase; -public class TestJsonSerDe extends TestCase { +import static org.junit.Assert.assertTrue; +import org.junit.Test; + +/** + * TestJsonSerDe. + */ +public class TestJsonSerDe { private static final Logger LOG = LoggerFactory.getLogger(TestJsonSerDe.class); @@ -139,6 +144,7 @@ return data; } + @Test public void testRW() throws Exception { Configuration conf = new Configuration(); @@ -174,6 +180,7 @@ public void testRW() throws Exception { } + @Test public void testRobustRead() throws Exception { /** * This test has been added to account for HCATALOG-436 @@ -264,6 +271,7 @@ String getInternalNames(String columnNames) { * Then it should still work, and ignore the "x" and "t" field and "c" subfield of "s", and it * should read k as null. */ + @Test public void testLooseJsonReadability() throws Exception { Configuration conf = new Configuration(); Properties props = new Properties(); @@ -291,6 +299,7 @@ public void testLooseJsonReadability() throws Exception { } + @Test public void testUpperCaseKey() throws Exception { Configuration conf = new Configuration(); Properties props = new Properties(); @@ -320,6 +329,7 @@ public void testUpperCaseKey() throws Exception { return retval; } + @Test public void testMapValues() throws Exception { Configuration conf = new Configuration(); Properties props = new Properties(); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java index b71c963091..2824f4da84 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java @@ -21,13 +21,23 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hive.hcatalog.common.HCatException; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertFalse; +import org.junit.Test; -public class TestHCatSchema extends TestCase { +/** + * + * TestHCatSchema. 
+ */ +public class TestHCatSchema { + @Test public void testCannotAddFieldMoreThanOnce() throws HCatException { List fieldSchemaList = new ArrayList(); fieldSchemaList.add(new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, "What's your handle?")); @@ -60,6 +70,7 @@ public void testCannotAddFieldMoreThanOnce() throws HCatException { assertEquals(2, schema.getFields().size()); } + @Test public void testHashCodeEquals() throws HCatException { HCatFieldSchema memberID1 = new HCatFieldSchema("memberID", HCatFieldSchema.Type.INT, "as a number"); HCatFieldSchema memberID2 = new HCatFieldSchema("memberID", HCatFieldSchema.Type.INT, "as a number"); @@ -75,6 +86,7 @@ public void testHashCodeEquals() throws HCatException { assertTrue("Expected hash codes to be equal", memberID1.hashCode() == memberID2.hashCode()); } + @Test public void testCannotInstantiateSchemaWithRepeatedFieldNames() throws HCatException { List fieldSchemaList = new ArrayList(); @@ -94,6 +106,7 @@ public void testCannotInstantiateSchemaWithRepeatedFieldNames() throws HCatExcep assertTrue(iae.getMessage().contains("Field named memberID already exists")); } } + @Test public void testRemoveAddField() throws HCatException { List fieldSchemaList = new ArrayList(); @@ -115,6 +128,7 @@ public void testRemoveAddField() throws HCatException { // HIVE-5336. Re-number the position after remove such that: // (1) getPosition on a column always returns a value between 0..schema.size()-1 // (2) getPosition() on 2 different columns should never give the same value. + @Test public void testRemoveAddField2() throws HCatException { List fieldSchemaList = new ArrayList(); HCatFieldSchema memberIDField = new HCatFieldSchema("memberID", HCatFieldSchema.Type.INT, "id as number"); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchemaUtils.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchemaUtils.java index 3655eea4cd..6455fade7b 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchemaUtils.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchemaUtils.java @@ -20,7 +20,7 @@ import java.io.PrintStream; -import junit.framework.TestCase; + import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.serde.serdeConstants; @@ -30,11 +30,17 @@ import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestHCatSchemaUtils extends TestCase { +/** + * TestHCatSchemaUtils. 
+ */ +public class TestHCatSchemaUtils { private static final Logger LOG = LoggerFactory.getLogger(TestHCatSchemaUtils.class); + @Test public void testSimpleOperation() throws Exception { String typeString = "struct," @@ -53,6 +59,7 @@ public void testSimpleOperation() throws Exception { assertEquals(hsch.get(0).getTypeString(), typeString.toLowerCase()); } + @Test public void testHCatFieldSchemaConversion() throws Exception { FieldSchema stringFieldSchema = new FieldSchema("name1", serdeConstants.STRING_TYPE_NAME, "comment1"); HCatFieldSchema stringHCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(stringFieldSchema); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java index 4ac01dfc2c..416a01e277 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatOutputFormat.java @@ -26,7 +26,7 @@ import java.util.Map; import com.google.common.collect.Lists; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -49,8 +49,17 @@ import org.apache.hive.hcatalog.data.schema.HCatSchema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -public class TestHCatOutputFormat extends TestCase { +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; + +/** + * TestHCatOutputFormat. + */ +public class TestHCatOutputFormat { private static final Logger LOG = LoggerFactory.getLogger(TestHCatOutputFormat.class); private HiveMetaStoreClient client; @@ -59,9 +68,9 @@ private static final String dbName = "hcatOutputFormatTestDB"; private static final String tblName = "hcatOutputFormatTestTable"; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + hiveConf = new HiveConf(this.getClass()); try { @@ -74,10 +83,10 @@ protected void setUp() throws Exception { } } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { try { - super.tearDown(); + client.dropTable(dbName, tblName); client.dropDatabase(dbName); @@ -136,6 +145,7 @@ private void initTable() throws Exception { } + @Test public void testSetOutput() throws Exception { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "test outputformat"); @@ -175,6 +185,7 @@ private void publishTest(Job job) throws Exception { assertTrue(part.getSd().getLocation().contains("p1")); } + @Test public void testGetTableSchema() throws Exception { Configuration conf = new Configuration(); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java index b190e4bb9b..18058dc8dd 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.Properties; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -49,12 +49,14 @@ import 
org.apache.hadoop.mapreduce.TaskAttemptID; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestRCFile. * */ -public class TestRCFileMapReduceInputFormat extends TestCase { +public class TestRCFileMapReduceInputFormat { private static final Logger LOG = LoggerFactory.getLogger(TestRCFileMapReduceInputFormat.class); @@ -173,6 +175,7 @@ private static Properties createProperties() { } + @Test public void testSynAndSplit() throws IOException, InterruptedException { splitBeforeSync(); splitRightBeforeSync(); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java index dca56ee031..09dc5d8ca7 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/TestReplicationTask.java @@ -18,7 +18,7 @@ */ package org.apache.hive.hcatalog.api.repl; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.NotificationEvent; @@ -30,9 +30,13 @@ import org.apache.hive.hcatalog.common.HCatConstants; import org.apache.hive.hcatalog.common.HCatException; import org.apache.hive.hcatalog.messaging.MessageFactory; +import static org.junit.Assert.assertTrue; import org.junit.Test; -public class TestReplicationTask extends TestCase{ +/** + * TestReplicationTask. + */ +public class TestReplicationTask { private static MessageFactory msgFactory = MessageFactory.getInstance(); @@ -68,7 +72,7 @@ public ReplicationTask create(HCatClient client, HCatNotificationEvent event) { } @Test - public static void testCreate() throws HCatException { + public void testCreate() throws HCatException { Table t = new Table(); t.setDbName("testdb"); t.setTableName("testtable"); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestNoopCommand.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestNoopCommand.java index bdabb0d1cc..e13cf145bc 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestNoopCommand.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestNoopCommand.java @@ -18,15 +18,19 @@ */ package org.apache.hive.hcatalog.api.repl.commands; -import junit.framework.TestCase; + import org.apache.hive.hcatalog.api.repl.Command; import org.apache.hive.hcatalog.api.repl.CommandTestUtils; +import static org.junit.Assert.assertEquals; import org.junit.Test; -public class TestNoopCommand extends TestCase { +/** + * TestNoopCommand. 
+ */ +public class TestNoopCommand { @Test - public static void testCommand(){ + public void testCommand(){ int evid = 999; Command testCmd = new NoopCommand(evid); diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java index 9ce714e6a6..3770c5301c 100644 --- hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java +++ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestDesc.java @@ -23,14 +23,19 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import junit.framework.TestCase; + import org.codehaus.jackson.map.ObjectMapper; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestDesc - Test the desc objects that are correctly converted to * and from json. This also sets every field of the TableDesc object. */ -public class TestDesc extends TestCase { +public class TestDesc { + @Test public void testTableDesc() throws Exception { diff --git hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java index 21a685cf72..4491a35620 100644 --- hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java +++ hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestServer.java @@ -18,40 +18,54 @@ */ package org.apache.hive.hcatalog.templeton; -import junit.framework.TestCase; + import org.apache.hive.hcatalog.templeton.mock.MockServer; import java.util.List; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.Test; /* * Test that the server code exists, and responds to basic requests. */ -public class TestServer extends TestCase { +/** + * TestServer. 
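
Note on the two changes just above: TestReplicationTask.testCreate() and TestNoopCommand.testCommand() both lose their static modifier. JUnit 4's default runner validates @Test methods reflectively and requires them to be public, non-static, void, and parameterless; a static @Test method is reported as an initialization error instead of being run. A minimal sketch of the rule, with a hypothetical class name:

import static org.junit.Assert.assertEquals;
import org.junit.Test;

// Hypothetical example class, not part of this patch.
public class StaticTestMethodSketch {

  // Leaving the modifier in place would make JUnit 4 report
  // "Method brokenTest() should not be static" instead of running it:
  //   @Test
  //   public static void brokenTest() { }

  @Test
  public void workingTest() {
    // Instance method: JUnit 4 creates a fresh instance for each @Test it runs.
    assertEquals(4, 2 + 2);
  }
}
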
+ */ +public class TestServer { MockServer server; + @Before public void setUp() { new Main(new String[]{}); // Initialize the config server = new MockServer(); } + @Test public void testServer() { assertNotNull(server); } + @Test public void testStatus() { assertEquals(server.status().get("status"), "ok"); } + @Test public void testVersions() { assertEquals(server.version().get("version"), "v1"); } + @Test public void testFormats() { assertEquals(1, server.requestFormats().size()); assertEquals( ((List)server.requestFormats().get("responseTypes")).get(0), "application/json"); } + @Test public void testVerifyPropertyParam() { // HIVE-15410: Though there are not restrictions to Hive table property key and it could be any // combination of the letters, digits and even punctuations, we support conventional property diff --git itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java index 62c109c45d..5ce3b94300 100644 --- itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java +++ itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java @@ -22,6 +22,8 @@ import static org.apache.hadoop.fs.permission.AclEntryType.OTHER; import static org.apache.hadoop.fs.permission.AclEntryType.USER; +import org.junit.After; + import java.lang.reflect.Method; import java.net.URI; import java.security.PrivilegedExceptionAction; @@ -102,8 +104,8 @@ protected String setupUser() { return userUgi.getShortUserName(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { super.tearDown(); if (dfs != null) { diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java index 747efd86e4..9ddad99226 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreAuthorization.java @@ -29,10 +29,15 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import junit.framework.TestCase; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestMetaStoreAuthorization extends TestCase { + +/** + * TestMetaStoreAuthorization. 
+ */ +public class TestMetaStoreAuthorization { protected HiveConf conf = new HiveConf(); private int port; @@ -43,6 +48,7 @@ public void setup() throws Exception { conf.setTimeVar(ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY, 60, TimeUnit.SECONDS); } + @Test public void testIsWritable() throws Exception { setup(); String testDir = System.getProperty("test.warehouse.dir", "/tmp"); @@ -66,6 +72,7 @@ public void testIsWritable() throws Exception { } } + @Test public void testMetaStoreAuthorization() throws Exception { setup(); MetaStoreTestUtils.startMetaStoreWithRetry(conf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java index 4d26f3e32f..88c7efa724 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java @@ -33,9 +33,19 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import junit.framework.TestCase; -public class TestMetastoreVersion extends TestCase { +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; + +/** + * TestMetastoreVersion. + */ +public class TestMetastoreVersion { private static final Logger LOG = LoggerFactory.getLogger(TestMetastoreVersion.class); protected HiveConf hiveConf; private IDriver driver; @@ -43,9 +53,9 @@ private String testMetastoreDB; private IMetaStoreSchemaInfo metastoreSchemaInfo; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + Field defDb = HiveMetaStore.HMSHandler.class.getDeclaredField("currentUrl"); defDb.setAccessible(true); defDb.set(null, null); @@ -68,8 +78,8 @@ protected void setUp() throws Exception { System.getProperty("test.tmp.dir", "target/tmp"), "derby"); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { File metaStoreDir = new File(testMetastoreDB); if (metaStoreDir.exists()) { FileUtils.forceDeleteOnExit(metaStoreDir); @@ -79,6 +89,7 @@ protected void tearDown() throws Exception { /*** * Test config defaults */ + @Test public void testDefaults() { System.clearProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString()); hiveConf = new HiveConf(this.getClass()); @@ -90,6 +101,7 @@ public void testDefaults() { * Test schema verification property * @throws Exception */ + @Test public void testVersionRestriction () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true"); hiveConf = new HiveConf(this.getClass()); @@ -114,6 +126,7 @@ public void testVersionRestriction () throws Exception { * and version correctly * @throws Exception */ + @Test public void testMetastoreVersion () throws Exception { // let the schema and version be auto created System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); @@ -133,6 +146,7 @@ public void testMetastoreVersion () throws Exception { * Test that with verification enabled, hive works when the correct schema is already populated * @throws Exception */ + @Test public void testVersionMatching () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); 
hiveConf = new HiveConf(this.getClass()); @@ -153,6 +167,7 @@ public void testVersionMatching () throws Exception { * Store garbage version in metastore and verify that hive fails when verification is on * @throws Exception */ + @Test public void testVersionMisMatch () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); hiveConf = new HiveConf(this.getClass()); @@ -175,6 +190,7 @@ public void testVersionMisMatch () throws Exception { * version * @throws Exception */ + @Test public void testVersionCompatibility () throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); hiveConf = new HiveConf(this.getClass()); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java index 64c606c8f7..20f95fe37e 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestDBTokenStore.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -32,9 +32,20 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation; import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport; import org.junit.Assert; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotSame; +import org.junit.Test; -public class TestDBTokenStore extends TestCase{ +/** + * TestDBTokenStore. + */ +public class TestDBTokenStore { + @Test public void testDBTokenStore() throws TokenStoreException, MetaException, IOException { DelegationTokenStore ts = new DBTokenStore(); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java index 4c4cf7c1e8..0b4f2bc4f2 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/security/TestZooKeeperTokenStore.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.List; -import junit.framework.TestCase; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; @@ -37,16 +37,28 @@ import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.ACL; import org.junit.Assert; - -public class TestZooKeeperTokenStore extends TestCase { +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; + +/** + * TestZooKeeperTokenStore. 
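
The TestMetastoreVersion conversion above shows the lifecycle pattern this patch applies throughout: the protected setUp()/tearDown() overrides become public methods annotated with @Before and @After, and the super.setUp()/super.tearDown() calls disappear because there is no junit.framework.TestCase left to delegate to. A minimal sketch of the pattern (the fixture field is hypothetical):

import static org.junit.Assert.assertNotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

// Hypothetical example class illustrating the @Before/@After lifecycle.
public class LifecycleSketch {
  private StringBuilder fixture; // hypothetical per-test fixture

  @Before // replaces the JUnit 3 protected setUp() override
  public void setUp() {
    fixture = new StringBuilder("ready");
  }

  @After // replaces the JUnit 3 protected tearDown() override
  public void tearDown() {
    fixture = null; // release the fixture; no super.tearDown() to call
  }

  @Test
  public void testFixtureInitialized() {
    assertNotNull(fixture); // @Before has already run for this instance
  }
}
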
+ */ +public class TestZooKeeperTokenStore { private MiniZooKeeperCluster zkCluster = null; private CuratorFramework zkClient = null; private int zkPort = -1; private ZooKeeperTokenStore ts; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { File zkDataDir = new File(System.getProperty("test.tmp.dir")); if (this.zkCluster != null) { throw new IOException("Cluster already running"); @@ -59,8 +71,8 @@ protected void setUp() throws Exception { this.zkClient.start(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { this.zkClient.close(); if (ts != null) { ts.close(); @@ -77,6 +89,7 @@ private Configuration createConf(String zkPath) { return conf; } + @Test public void testTokenStorage() throws Exception { String ZK_PATH = "/zktokenstore-testTokenStorage"; ts = new ZooKeeperTokenStore(); @@ -126,6 +139,7 @@ public void testTokenStorage() throws Exception { assertNull(ts.getToken(tokenId)); } + @Test public void testAclNoAuth() throws Exception { String ZK_PATH = "/zktokenstore-testAclNoAuth"; Configuration conf = createConf(ZK_PATH); @@ -143,6 +157,7 @@ public void testAclNoAuth() throws Exception { } } + @Test public void testAclInvalid() throws Exception { String ZK_PATH = "/zktokenstore-testAclInvalid"; String aclString = "sasl:hive/host@TEST.DOMAIN:cdrwa, fail-parse-ignored"; @@ -164,6 +179,7 @@ public void testAclInvalid() throws Exception { } } + @Test public void testAclPositive() throws Exception { String ZK_PATH = "/zktokenstore-testAcl"; Configuration conf = createConf(ZK_PATH); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java index ba344d48f9..81b7ff0eb4 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/tools/metatool/TestHiveMetaTool.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; @@ -39,9 +39,14 @@ import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties; import org.apache.hadoop.util.StringUtils; import org.apache.thrift.TException; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** Integration tests for the HiveMetaTool program. 
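
The ACL tests in TestZooKeeperTokenStore keep the JUnit 3 idiom of calling fail() inside a try block and swallowing the expected exception in the catch. That still works under JUnit 4, but when the entire method is expected to throw, the annotation form is shorter. A sketch under hypothetical names:

import org.junit.Test;

// Hypothetical example: declaring an expected exception instead of try/fail/catch.
public class ExpectedExceptionSketch {

  @Test(expected = IllegalArgumentException.class) // passes only if thrown
  public void testRejectsNegativeSize() {
    newBuffer(-1); // hypothetical factory expected to throw
  }

  private byte[] newBuffer(int size) {
    if (size < 0) {
      throw new IllegalArgumentException("size must be non-negative: " + size);
    }
    return new byte[size];
  }
}
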
*/ -public class TestHiveMetaTool extends TestCase { +public class TestHiveMetaTool { private static final String DB_NAME = "TestHiveMetaToolDB"; private static final String TABLE_NAME = "simpleTbl"; private static final String LOCATION = "hdfs://nn.example.com/"; @@ -53,9 +58,9 @@ private HiveMetaStoreClient client; private OutputStream os; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + try { os = new ByteArrayOutputStream(); @@ -109,6 +114,7 @@ private void createTable() throws Exception { client.createTable(tbl); } + @Test public void testListFSRoot() throws Exception { HiveMetaTool.main(new String[] {"-listFSRoot"}); String out = os.toString(); @@ -116,6 +122,7 @@ public void testListFSRoot() throws Exception { out.contains(client.getDatabase(DB_NAME).getLocationUri())); } + @Test public void testExecuteJDOQL() throws Exception { HiveMetaTool.main( new String[] {"-executeJDOQL", "select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase"}); @@ -124,6 +131,7 @@ public void testExecuteJDOQL() throws Exception { out.contains(client.getDatabase(DB_NAME).getLocationUri())); } + @Test public void testUpdateFSRootLocation() throws Exception { checkAvroSchemaURLProps(AVRO_URI); @@ -140,12 +148,12 @@ private void checkAvroSchemaURLProps(String expectedUri) throws TException { assertEquals(expectedUri, table.getSd().getParameters().get(AvroTableProperties.SCHEMA_URL.getPropName())); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { try { client.dropTable(DB_NAME, TABLE_NAME); client.dropDatabase(DB_NAME); - super.tearDown(); + client.close(); } catch (Throwable e) { System.err.println("Unable to close metastore"); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java index 14951cdd0d..dffaa25a6f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/BaseTestQueries.java @@ -20,12 +20,12 @@ import java.io.File; -import junit.framework.TestCase; + /** * Base class for testing queries. 
*/ -public abstract class BaseTestQueries extends TestCase { +public abstract class BaseTestQueries { protected final String inpDir = System .getProperty("hive.root") + "/ql/src/test/queries/clientpositive"; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java index de3383355b..3ad272a07e 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java @@ -20,7 +20,7 @@ import java.net.URI; import java.util.HashMap; import junit.framework.JUnit4TestAdapter; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FileSystem; @@ -33,12 +33,18 @@ import org.apache.hadoop.hive.ql.metadata.*; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * Tests DDL with remote metastore service and second namenode (HIVE-6374) * */ -public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase { +public class TestDDLWithRemoteMetastoreSecondNamenode { static HiveConf conf; private static final String Database1Name = "db1_nondefault_nn"; @@ -65,9 +71,9 @@ private static int tests = 0; private static Boolean isInitialized = false; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + if (tests > 0) { return; } @@ -125,9 +131,9 @@ protected void setUp() throws Exception { } } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @After + public void tearDown() throws Exception { + if (--tests == 0) { cleanup(); shutdownMiniDfs(); @@ -255,6 +261,7 @@ private void createDatabaseAndCheck(String databaseName, String databaseLocation } } + @Test public void testAlterPartitionSetLocationNonDefaultNameNode() throws Exception { assertTrue("Test suite should have been initialized", isInitialized); String tableLocation = tmppathFs2 + "/" + "test_set_part_loc"; @@ -264,6 +271,7 @@ public void testAlterPartitionSetLocationNonDefaultNameNode() throws Exception { alterPartitionAndCheck(table, "p", "p1", "/tmp/test/2"); } + @Test public void testCreateDatabaseWithTableNonDefaultNameNode() throws Exception { assertTrue("Test suite should be initialied", isInitialized ); final String tableLocation = tmppathFs2 + "/" + Table3Name; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java index 405d21cdb2..e84a51e869 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java @@ -24,6 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.junit.Assert.fail; + import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; /** diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java index 62c037edc8..9c67391bab 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql; import java.io.File; +import static org.junit.Assert.fail; /** * Suite for testing running of queries in multi-threaded mode. diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index 5fd0ef9161..3c40983c80 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -24,7 +24,7 @@ import java.util.LinkedList; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,12 +46,17 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.tools.LineageInfo; import org.apache.hadoop.mapred.TextInputFormat; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.Test; /** * TestHiveHistory. * */ -public class TestHiveHistory extends TestCase { +public class TestHiveHistory { static HiveConf conf; @@ -63,8 +68,8 @@ * intialize the tables */ - @Override - protected void setUp() { + @Before + public void setUp() { try { conf = new HiveConf(HiveHistory.class); SessionState.start(conf); @@ -120,6 +125,7 @@ protected void setUp() { /** * Check history file output for this query. 
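
BaseTestQueries illustrates how abstract helpers migrate: the class simply stops extending TestCase, and subclasses such as TestLocationQueries and TestMTQueries pick up the assertions they need (here, fail) through static imports. Under JUnit 4, a class with no @Test methods is ignored by the runner, so the abstract base needs no annotations at all. A hypothetical sketch:

import static org.junit.Assert.assertTrue;
import org.junit.Test;

// Hypothetical shared helpers in a plain abstract base class.
abstract class BaseQuerySketch {
  protected boolean runQuery(String q) {
    return q != null && !q.isEmpty(); // stand-in for real query execution
  }
}

// Only concrete classes containing @Test methods are picked up by JUnit 4.
public class SimpleQuerySketch extends BaseQuerySketch {
  @Test
  public void testQueryRuns() {
    assertTrue(runQuery("SELECT 1"));
  }
}
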
*/ + @Test public void testSimpleQuery() { new LineageInfo(); try { @@ -174,6 +180,7 @@ public void testSimpleQuery() { } } + @Test public void testQueryloglocParentDirNotExist() throws Exception { String parentTmpDir = tmpdir + "/HIVE2654"; Path parentDirPath = new Path(parentTmpDir); @@ -203,6 +210,7 @@ public void testQueryloglocParentDirNotExist() throws Exception { * Check if HiveHistoryImpl class is returned when hive history is enabled * @throws Exception */ + @Test public void testHiveHistoryConfigEnabled() throws Exception { HiveConf conf = new HiveConf(SessionState.class); conf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, true); @@ -216,6 +224,7 @@ public void testHiveHistoryConfigEnabled() throws Exception { * Check if HiveHistory class is a Proxy class when hive history is disabled * @throws Exception */ + @Test public void testHiveHistoryConfigDisabled() throws Exception { HiveConf conf = new HiveConf(SessionState.class); conf.setBoolVar(ConfVars.HIVE_SESSION_HISTORY_ENABLED, false); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java index 58ac4aca81..8cdd75b1fa 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java @@ -20,7 +20,7 @@ import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -29,9 +29,16 @@ import org.apache.hadoop.hive.ql.IDriver; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; -public class TestSemanticAnalyzerHookLoading extends TestCase { +/** + * TestSemanticAnalyzerHookLoading. 
+ */ +public class TestSemanticAnalyzerHookLoading { + @Test public void testHookLoading() throws Exception{ HiveConf conf = new HiveConf(this.getClass()); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java index 1f6ec27a20..c5fffd23ad 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java @@ -22,13 +22,17 @@ import java.util.HashMap; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.InvalidObjectException; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.Table; @@ -38,22 +42,28 @@ import org.apache.hadoop.hive.ql.security.DummyHiveMetastoreAuthorizationProvider.AuthCallContext; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.thrift.TException; import org.junit.Assert; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * TestAuthorizationPreEventListener. 
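
Several of the converted files, TestAuthorizationPreEventListener among them, end up using both the qualified org.junit.Assert.assertEquals(...) form and the statically imported form in the same class. Both resolve to the same methods, so mixing them is harmless, though settling on one style per file reads better. A small sketch:

import static org.junit.Assert.assertEquals;
import org.junit.Assert;
import org.junit.Test;

// Hypothetical example: both call sites invoke org.junit.Assert.assertEquals.
public class AssertStyleSketch {
  @Test
  public void testBothStylesAgree() {
    assertEquals("same", "sa" + "me");        // static-import style
    Assert.assertEquals("same", "sa" + "me"); // qualified style, same method
  }
}
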
Test case for * {@link org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener} and * {@link org.apache.hadoop.hive.metastore.MetaStorePreEventListener} */ -public class TestAuthorizationPreEventListener extends TestCase { +public class TestAuthorizationPreEventListener { private HiveConf clientHiveConf; private HiveMetaStoreClient msc; private IDriver driver; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { + - super.setUp(); System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, AuthorizationPreEventListener.class.getName()); @@ -79,9 +89,9 @@ protected void setUp() throws Exception { driver = DriverFactory.newDriver(clientHiveConf); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @After + public void tearDown() throws Exception { + } private void validateCreateDb(Database expectedDb, Database actualDb) { @@ -164,6 +174,7 @@ private void validateAlterDb(Database expectedDb, Database actualDb) { assertEquals(expectedDb, actualDb); } + @Test public void testListener() throws Exception { String dbName = "hive3705"; String tblName = "tmptbl"; @@ -276,7 +287,13 @@ public void testListener() throws Exception { validateDropTable(tbl, tableFromDropTableEvent); // verify that we can create a table with IF/OF to some custom non-existent format - Table tCustom = tbl.deepCopy(); + verifyListener(dbName, authCalls, db, tbl); + } + +private void verifyListener(String dbName, List authCalls, Database db, Table tbl) + throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException { + int listSize; + Table tCustom = tbl.deepCopy(); tCustom.getSd().setInputFormat("org.apache.hive.dummy.DoesNotExistInputFormat"); tCustom.getSd().setOutputFormat("org.apache.hive.dummy.DoesNotExistOutputFormat"); if (tCustom.getSd().getSerdeInfo() == null){ @@ -351,7 +368,7 @@ public void testListener() throws Exception { DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB); validateDropDb(db, dbFromDropDatabaseEvent); - } +} public Object assertAndExtractSingleObjectFromEvent(int listSize, List authCalls, diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java index ce55f91bb3..b284777536 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java @@ -21,7 +21,7 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; @@ -37,12 +37,19 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.shims.Utils; import org.apache.hadoop.security.UserGroupInformation; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * TestClientSideAuthorizationProvider : Simple base test for client side * Authorization Providers. 
By default, tests DefaultHiveAuthorizationProvider */ -public class TestClientSideAuthorizationProvider extends TestCase { +public class TestClientSideAuthorizationProvider { protected HiveConf clientHiveConf; protected HiveMetaStoreClient msc; protected IDriver driver; @@ -54,10 +61,10 @@ protected String getAuthorizationProvider(){ } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { + - super.setUp(); // Turn off metastore-side authorization System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, @@ -89,9 +96,9 @@ protected void setUp() throws Exception { driver = DriverFactory.newDriver(clientHiveConf); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @After + public void tearDown() throws Exception { + } private void validateCreateDb(Database expectedDb, String dbName) { @@ -112,6 +119,7 @@ protected String getTestTableName(){ return "smp_cl_tbl"; } + @Test public void testSimplePrivileges() throws Exception { String dbName = getTestDbName(); String tblName = getTestTableName(); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java index 0e08e81f20..fd600af6f9 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java @@ -23,7 +23,7 @@ import java.util.HashMap; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.cli.CliSessionState; @@ -52,6 +52,12 @@ import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * TestHiveMetastoreAuthorizationProvider. 
Test case for @@ -68,7 +74,7 @@ * This test is also intended to be extended to provide tests for other * authorization providers like StorageBasedAuthorizationProvider */ -public class TestMetastoreAuthorizationProvider extends TestCase { +public class TestMetastoreAuthorizationProvider { private static final Logger LOG = LoggerFactory.getLogger(TestMetastoreAuthorizationProvider.class); protected HiveConf clientHiveConf; @@ -85,10 +91,10 @@ protected HiveConf createHiveConf() throws Exception { return new HiveConf(this.getClass()); } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { + - super.setUp(); // Turn on metastore-side authorization System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, @@ -130,9 +136,9 @@ protected void setupMetaStoreReadAuthorization() { System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "false"); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @After + public void tearDown() throws Exception { + } private void validateCreateDb(Database expectedDb, String dbName) { @@ -161,38 +167,31 @@ protected String setupUser() { return ugi.getUserName(); } + @Test public void testSimplePrivileges() throws Exception { if (!isTestEnabled()) { System.out.println("Skipping test " + this.getClass().getName()); return; } - String dbName = getTestDbName(); String tblName = getTestTableName(); String userName = setupUser(); - allowCreateDatabase(userName); - CommandProcessorResponse ret = driver.run("create database " + dbName); assertEquals(0,ret.getResponseCode()); Database db = msc.getDatabase(dbName); String dbLocn = db.getLocationUri(); - validateCreateDb(db,dbName); disallowCreateInDb(dbName, userName, dbLocn); - disallowCreateDatabase(userName); - driver.run("use " + dbName); ret = driver.run( String.format("create table %s (a string) partitioned by (b string)", tblName)); - assertEquals(1,ret.getResponseCode()); // Even if table location is specified table creation should fail String tblNameLoc = tblName + "_loc"; String tblLocation = new Path(dbLocn).getParent().toUri() + "/" + tblNameLoc; - driver.run("use " + dbName); ret = driver.run( String.format("create table %s (a string) partitioned by (b string) location '" + @@ -200,10 +199,8 @@ public void testSimplePrivileges() throws Exception { assertEquals(1, ret.getResponseCode()); // failure from not having permissions to create table - ArrayList fields = new ArrayList(2); fields.add(new FieldSchema("a", serdeConstants.STRING_TYPE_NAME, "")); - Table ttbl = new Table(); ttbl.setDbName(dbName); ttbl.setTableName(tblName); @@ -232,7 +229,6 @@ public void testSimplePrivileges() throws Exception { assertNoPrivileges(me); allowCreateInDb(dbName, userName, dbLocn); - driver.run("use " + dbName); ret = driver.run( String.format("create table %s (a string) partitioned by (b string)", tblName)); @@ -241,7 +237,6 @@ public void testSimplePrivileges() throws Exception { Table tbl = msc.getTable(dbName, tblName); Assert.assertTrue(tbl.isSetId()); tbl.unsetId(); - validateCreateTable(tbl,tblName, dbName); // Table creation should succeed even if location is specified @@ -320,7 +315,6 @@ public void testSimplePrivileges() throws Exception { disallowDropOnTable(tblName, userName, tbl.getSd().getLocation()); ret = driver.run("drop table "+tbl.getTableName()); assertEquals(1,ret.getResponseCode()); - } protected void allowCreateDatabase(String userName) diff --git 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java index 086ef9e6cb..d0645d74e2 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java @@ -25,6 +25,9 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; /** * TestStorageBasedClientSideAuthorizationProvider : Overrides * TestClientSideAuthorizationProvider to test StorageBasedAuthorizationProvider diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java index 0783e61156..fe8e15faa8 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java @@ -27,6 +27,8 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; /** * TestStorageBasedMetastoreAuthorizationProvider. Test case for * StorageBasedAuthorizationProvider, by overriding methods defined in diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java index b39871205d..b31e5b897f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/TestSerdeWithFieldComments.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.serde2; -import junit.framework.TestCase; + import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -30,8 +30,13 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestSerdeWithFieldComments extends TestCase { +/** + * TestSerdeWithFieldComments. 
+ */ +public class TestSerdeWithFieldComments { private StructField mockedStructField(String name, String oiTypeName, String comment) { @@ -47,6 +52,7 @@ private StructField mockedStructField(String name, String oiTypeName, return m; } + @Test public void testFieldComments() throws MetaException, SerDeException { StructObjectInspector mockSOI = mock(StructObjectInspector.class); when(mockSOI.getCategory()).thenReturn(ObjectInspector.Category.STRUCT); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java index 9ef678c62f..8fbd4d46e7 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java @@ -26,19 +26,23 @@ import java.util.Random; import java.util.Map.Entry; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol; import org.apache.hadoop.io.BytesWritable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * TestDynamicSerDe. * */ -public class TestDynamicSerDe extends TestCase { +public class TestDynamicSerDe { public static HashMap makeHashMap(String... params) { HashMap r = new HashMap(); @@ -48,6 +52,7 @@ return r; } + @Test public void testDynamicSerDe() throws Throwable { try { @@ -284,6 +289,7 @@ private void sort(Object[] structs) { } } + @Test public void testTBinarySortableProtocol() throws Throwable { try { @@ -402,6 +408,7 @@ public void testTBinarySortableProtocol() throws Throwable { } } + @Test public void testConfigurableTCTLSeparated() throws Throwable { try { @@ -481,6 +488,7 @@ public void testConfigurableTCTLSeparated() throws Throwable { * Tests a single null list within a struct with return nulls on. */ + @Test public void testNulls1() throws Throwable { try { @@ -533,6 +541,7 @@ public void testNulls1() throws Throwable { * Tests all elements of a struct being null with return nulls on. */ + @Test public void testNulls2() throws Throwable { try { @@ -591,6 +600,7 @@ public void testNulls2() throws Throwable { * Tests map and list being empty with return nulls on. */ + @Test public void testNulls3() throws Throwable { try { @@ -649,6 +659,7 @@ public void testNulls3() throws Throwable { * Tests map and list null/empty with return nulls *off*. */ + @Test public void testNulls4() throws Throwable { try { @@ -709,6 +720,7 @@ public void testNulls4() throws Throwable { * Tests map and list null/empty with return nulls *off*. 
*/ + @Test public void testStructsinStructs() throws Throwable { try { @@ -775,6 +787,7 @@ public void testStructsinStructs() throws Throwable { } + @Test public void testSkip() throws Throwable { try { diff --git ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java index 1becbb8994..c861107ecb 100644 --- ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java +++ ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java @@ -50,7 +50,12 @@ import com.google.common.collect.Lists; -import junit.framework.TestCase; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * Tests hive metastore expression support. This should be moved in metastore module @@ -60,13 +65,13 @@ * it doesn't test all the edge cases of the filter (if classes were merged, perhaps the * filter test could be rolled into it); assumption is that they use the same path in SQL/JDO. */ -public class TestMetastoreExpr extends TestCase { +public class TestMetastoreExpr { protected static HiveMetaStoreClient client; - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { try { - super.tearDown(); + client.close(); } catch (Throwable e) { System.err.println("Unable to close metastore"); @@ -75,9 +80,9 @@ protected void tearDown() throws Exception { } } - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + try { client = new HiveMetaStoreClient(new HiveConf(this.getClass())); } catch (Throwable e) { @@ -98,6 +103,7 @@ private static void silentDropDatabase(String dbName) throws TException { } } + @Test public void testPartitionExpr() throws Exception { String dbName = "filterdb"; String tblName = "filtertbl"; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 78f25856a4..b319c4b92d 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -24,7 +24,7 @@ import java.util.LinkedList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.metastore.Warehouse; import org.slf4j.Logger; @@ -63,13 +63,16 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.TextInputFormat; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.Test; /** * Mimics the actual query compiler in generating end to end plans and testing * them out. 
* */ -public class TestExecDriver extends TestCase { +public class TestExecDriver { static QueryState queryState; static HiveConf conf; @@ -153,8 +156,8 @@ MapredWork mr; - @Override - protected void setUp() { + @Before + public void setUp() { mr = PlanUtils.getMapRedWork(); ctx = new CompilationOpContext(); } @@ -491,6 +494,7 @@ private void executePlan() throws Exception { LOG.info(testName + " execution completed successfully"); } + @Test public void testMapPlan1() throws Exception { LOG.info("Beginning testMapPlan1"); @@ -499,6 +503,7 @@ public void testMapPlan1() throws Exception { fileDiff("lt100.txt.deflate", "mapplan1.out"); } + @Test public void testMapPlan2() throws Exception { LOG.info("Beginning testMapPlan2"); @@ -507,6 +512,7 @@ public void testMapPlan2() throws Exception { fileDiff("lt100.txt", "mapplan2.out"); } + @Test public void testMapRedPlan1() throws Exception { LOG.info("Beginning testMapRedPlan1"); @@ -516,6 +522,7 @@ public void testMapRedPlan1() throws Exception { fileDiff("kv1.val.sorted.txt", "mapredplan1.out"); } + @Test public void testMapRedPlan2() throws Exception { LOG.info("Beginning testMapPlan2"); @@ -525,6 +532,7 @@ public void testMapRedPlan2() throws Exception { fileDiff("lt100.sorted.txt", "mapredplan2.out"); } + @Test public void testMapRedPlan3() throws Exception { LOG.info("Beginning testMapPlan3"); @@ -534,6 +542,7 @@ public void testMapRedPlan3() throws Exception { fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out"); } + @Test public void testMapRedPlan4() throws Exception { LOG.info("Beginning testMapPlan4"); @@ -543,6 +552,7 @@ public void testMapRedPlan4() throws Exception { fileDiff("kv1.string-sorted.txt", "mapredplan4.out"); } + @Test public void testMapRedPlan5() throws Exception { LOG.info("Beginning testMapPlan5"); @@ -552,6 +562,7 @@ public void testMapRedPlan5() throws Exception { fileDiff("kv1.string-sorted.txt", "mapredplan5.out"); } + @Test public void testMapRedPlan6() throws Exception { LOG.info("Beginning testMapPlan6"); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java index 32f14999f2..34fe2b9dcf 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java @@ -20,7 +20,7 @@ import java.util.ArrayList; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,12 +40,15 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.Test; /** * TestExpressionEvaluator. 
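
TestExecDriver keeps its expensive setup in static fields. JUnit 4 creates a new test-class instance per @Test method, so state that must be built once per class belongs either in static initializers, as here, or in @BeforeClass/@AfterClass methods, which must be public static void. A hypothetical sketch of the annotation-based variant:

import static org.junit.Assert.assertEquals;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

// Hypothetical example of one-time setup shared by all tests in the class.
public class BeforeClassSketch {
  private static String shared; // initialized once, not per test

  @BeforeClass // must be public static void
  public static void initOnce() {
    shared = "expensive resource";
  }

  @AfterClass
  public static void disposeOnce() {
    shared = null;
  }

  @Test
  public void testSharedVisible() {
    assertEquals("expensive resource", shared);
  }
}
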
* */ -public class TestExpressionEvaluator extends TestCase { +public class TestExpressionEvaluator { // this is our row to test expressions on protected InspectableObject r; @@ -97,10 +100,11 @@ public TestExpressionEvaluator() { } } - @Override - protected void setUp() { + @Before + public void setUp() { } + @Test public void testExprNodeColumnEvaluator() throws Throwable { try { // get a evaluator for a simple field expression @@ -136,6 +140,7 @@ private static ExprNodeDesc getListIndexNode(ExprNodeDesc node, children); } + @Test public void testExprNodeFuncEvaluator() throws Throwable { try { // get a evaluator for a string concatenation expression @@ -161,6 +166,7 @@ public void testExprNodeFuncEvaluator() throws Throwable { } } + @Test public void testExprNodeConversionEvaluator() throws Throwable { try { // get a evaluator for a string concatenation expression @@ -204,6 +210,7 @@ private static void measureSpeed(String expr, int times, + " seconds/million call."); } + @Test public void testExprNodeSpeed() throws Throwable { try { int basetimes = 100000; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java index 40d60f36de..375e3d18d7 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java @@ -23,7 +23,7 @@ import java.util.LinkedList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.conf.HiveConf; @@ -50,8 +50,17 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.junit.Assert; - -public class TestFunctionRegistry extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; + +/** + * FunctionRegistry Test. 
+ */ +public class TestFunctionRegistry { public class TestUDF { public void same(DoubleWritable x, DoubleWritable y) {} @@ -76,8 +85,8 @@ public void typeaffinity2(DoubleWritable x) {} TypeInfo char5; TypeInfo char10; - @Override - protected void setUp() { + @Before + public void setUp() { String maxVarcharTypeName = "varchar(" + HiveVarchar.MAX_VARCHAR_LENGTH + ")"; maxVarchar = TypeInfoFactory.getPrimitiveTypeInfo(maxVarcharTypeName); varchar10 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(10)"); @@ -91,6 +100,7 @@ private void implicit(TypeInfo a, TypeInfo b, boolean convertible) { assertEquals(convertible, TypeInfoUtils.implicitConvertible(a, b)); } + @Test public void testImplicitConversion() { implicit(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, true); implicit(TypeInfoFactory.longTypeInfo, TypeInfoFactory.decimalTypeInfo, true); @@ -149,6 +159,7 @@ private void typeAffinity(String methodName, TypeInfo inputType, } } + @Test public void testTypeAffinity() { // Prefer numeric type arguments over other method signatures typeAffinity("typeaffinity1", TypeInfoFactory.shortTypeInfo, 1, DoubleWritable.class); @@ -191,6 +202,7 @@ private void verify(Class udf, String name, TypeInfo ta, TypeInfo tb, assertEquals(b, result.getParameterTypes()[1]); } + @Test public void testGetMethodInternal() { verify(TestUDF.class, "same", TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, @@ -229,6 +241,7 @@ private void common(TypeInfo a, TypeInfo b, TypeInfo result) { assertEquals(result, FunctionRegistry.getCommonClass(a,b)); } + @Test public void testCommonClass() { common(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, TypeInfoFactory.decimalTypeInfo); @@ -251,6 +264,7 @@ private void comparison(TypeInfo a, TypeInfo b, TypeInfo result) { assertEquals(result, FunctionRegistry.getCommonClassForComparison(a,b)); } + @Test public void testCommonClassComparison() { comparison(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, TypeInfoFactory.decimalTypeInfo); @@ -290,6 +304,7 @@ public void testCommonClassComparison() { /** * Method to print out the comparison/conversion behavior for data types. */ + @Test public void testPrintTypeCompatibility() { if (true) { return; @@ -337,6 +352,7 @@ private void unionAll(TypeInfo a, TypeInfo b, TypeInfo result) { assertEquals(result, FunctionRegistry.getCommonClassForUnionAll(a,b)); } + @Test public void testCommonClassUnionAll() { unionAll(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo); @@ -364,6 +380,7 @@ public void testCommonClassUnionAll() { } + @Test public void testGetTypeInfoForPrimitiveCategory() { // varchar should take string length into account. 
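
One oddity above: testPrintTypeCompatibility gains @Test even though its body starts with if (true) { return; }, so it always passes without testing anything. JUnit 4's @Ignore annotation expresses the same intent while reporting the method as skipped rather than green. A sketch:

import org.junit.Ignore;
import org.junit.Test;

// Hypothetical example of parking a diagnostic test with @Ignore.
public class IgnoreSketch {

  @Ignore("diagnostic printout; run manually when needed")
  @Test
  public void testPrintTypeCompatibility() {
    // Never executed while @Ignore is present; reported as skipped.
  }
}
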
// varchar(5), varchar(10) => varchar(10) @@ -394,10 +411,11 @@ public void testGetTypeInfoForPrimitiveCategory() { PrimitiveCategory.DOUBLE)); } - @Override - protected void tearDown() { + @After + public void tearDown() { } + @Test public void testIsRankingFunction() throws Exception { Assert.assertTrue(FunctionRegistry.isRankingFunction("rank")); Assert.assertTrue(FunctionRegistry.isRankingFunction("dense_rank")); @@ -406,6 +424,7 @@ public void testIsRankingFunction() throws Exception { Assert.assertFalse(FunctionRegistry.isRankingFunction("min")); } + @Test public void testImpliesOrder() throws Exception { Assert.assertTrue(FunctionRegistry.impliesOrder("rank")); Assert.assertTrue(FunctionRegistry.impliesOrder("dense_rank")); @@ -418,6 +437,7 @@ public void testImpliesOrder() throws Exception { Assert.assertFalse(FunctionRegistry.impliesOrder("min")); } + @Test public void testRegisterTemporaryFunctions() throws Exception { FunctionResource[] emptyResources = new FunctionResource[] {}; @@ -444,6 +464,7 @@ public void testRegisterTemporaryFunctions() throws Exception { assertFalse(functionInfo.isNative()); } + @Test public void testRegisterPermanentFunction() throws Exception { FunctionResource[] emptyResources = new FunctionResource[] {}; @@ -485,12 +506,14 @@ public void testRegisterPermanentFunction() throws Exception { assertFalse(functionInfo.isBuiltIn()); } + @Test public void testBuiltInFunction() throws Exception { FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo("ln"); assertTrue(functionInfo.isBuiltIn()); assertTrue(functionInfo.isNative()); } + @Test public void testIsPermanentFunction() throws Exception { // Setup exprNode GenericUDF udf = new GenericUDFCurrentTimestamp(); @@ -530,6 +553,7 @@ private void checkNondeterministicFn(GenericUDF udf) { assertFalse(FunctionRegistry.isConsistentWithinQuery(udf)); } + @Test public void testDeterminism() throws Exception { checkDeterministicFn(getUDF("+")); checkDeterministicFn(getUDF("ascii")); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java index de3ecb0efe..530ff91ee1 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.exec; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.vector.VectorAppMasterEventOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator; @@ -32,6 +32,9 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorSparkHashTableSinkOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorSparkPartitionPruningSinkOperator; import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator; +import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.After; import org.junit.Test; /* @@ -50,20 +53,20 @@ * License for the specific language governing permissions and limitations under * the License. */ -public class TestOperatorNames extends TestCase { - public TestOperatorNames(String name) { - super(name); - } +/** + * OperatorNames Test. 
+ */ +public class TestOperatorNames { + + @Before + public void setUp() throws Exception { - @Override - protected void setUp() throws Exception { - super.setUp(); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); + @After + public void tearDown() throws Exception { + } /* diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java index c7cd4ad3f6..13a7b56965 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java @@ -62,23 +62,26 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapreduce.MRJobConfig; import org.junit.Assert; -import org.junit.Test; import org.mockito.Mockito; import static org.mockito.Mockito.when; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import org.junit.Before; +import org.junit.Test; /** * TestOperators. * */ -public class TestOperators extends TestCase { +public class TestOperators { // this is our row to test expressions on protected InspectableObject[] r; - @Override - protected void setUp() { + @Before + public void setUp() { r = new InspectableObject[5]; ArrayList names = new ArrayList(3); names.add("col0"); @@ -126,6 +129,7 @@ private void testTaskIds(String [] taskIds, String expectedAttemptId, String exp * file naming libraries * The old test was deactivated as part of hive-405 */ + @Test public void testFileSinkOperator() throws Throwable { try { @@ -160,6 +164,7 @@ public void testFileSinkOperator() throws Throwable { * variables. But environment variables have some system limitations and we have to check * job configuration properties firstly. This test checks that staff. */ + @Test public void testScriptOperatorEnvVarsProcessing() throws Throwable { try { ScriptOperator scriptOperator = new ScriptOperator(new CompilationOpContext()); @@ -200,6 +205,7 @@ public void testScriptOperatorEnvVarsProcessing() throws Throwable { } } + @Test public void testScriptOperatorBlacklistedEnvVarsProcessing() { ScriptOperator scriptOperator = new ScriptOperator(new CompilationOpContext()); @@ -215,6 +221,7 @@ public void testScriptOperatorBlacklistedEnvVarsProcessing() { Assert.assertTrue(env.containsKey("barfoo")); } + @Test public void testScriptOperator() throws Throwable { try { System.out.println("Testing Script Operator"); @@ -289,6 +296,7 @@ public void testScriptOperator() throws Throwable { } } + @Test public void testMapOperator() throws Throwable { try { System.out.println("Testing Map Operator"); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java index 206bf08647..0f993fc52e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestPartitionKeySampler.java @@ -18,11 +18,17 @@ package org.apache.hadoop.hive.ql.exec; -import junit.framework.TestCase; + import java.util.Arrays; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestPartitionKeySampler extends TestCase { +/** + * PartitionKeySampler Test. 
+ */ +public class TestPartitionKeySampler { private static final byte[] _100 = "100".getBytes(); private static final byte[] _200 = "200".getBytes(); @@ -32,6 +38,7 @@ // the current random sampling implementation in InputSampler always returns // the values at indices 3, 5 and 8, which can be the same as the previous partition key. // That induces a "Split points are out of order" exception in TotalOrderPartitioner, causing HIVE-7699 + @Test public void test() throws Throwable { byte[][] sampled; sampled = new byte[][] { diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java index 3aaf56145f..5d0bab3cb4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java @@ -36,14 +36,17 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestPlan. * */ -public class TestPlan extends TestCase { +public class TestPlan { + @Test public void testPlan() throws Exception { final String F1 = "#affiliations"; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java index 1f2b171307..92ca0f6591 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java @@ -23,12 +23,14 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; -import junit.framework.TestCase; + +import static org.junit.Assert.fail; +import org.junit.Test; /** * Unit test for the vectorized conversion to and from row object[]. */ -public class TestVectorRowObject extends TestCase { +public class TestVectorRowObject { void examineBatch(VectorizedRowBatch batch, VectorExtractRow vectorExtractRow, Object[][] randomRows, int firstRandomRowIndex ) { @@ -98,6 +100,7 @@ void testVectorRowObject(int caseNum, boolean sort, Random r) throws HiveExcepti } } + @Test public void testVectorRowObject() throws Throwable { try { @@ -118,4 +121,4 @@ public void testVectorRowObject() throws Throwable { throw e; } } -} \ No newline at end of file +} diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java index 8b1b612e3a..57bac7f180 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java @@ -54,12 +54,15 @@ import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * Unit test for the vectorized serialize and deserialize row.
 */ -public class TestVectorSerDeRow extends TestCase { +public class TestVectorSerDeRow { public static enum SerializationType { NONE, @@ -77,7 +80,7 @@ private void verifyRead( Object complexFieldObj = VectorVerifyFast.deserializeReadComplexType(deserializeRead, typeInfo); if (expectedObject == null) { if (complexFieldObj != null) { - TestCase.fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + + fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + ", " + complexFieldObj.toString() + ")"); } } else { @@ -89,12 +92,12 @@ private void verifyRead( return; } } - TestCase.fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + + fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + ", " + expectedObject.toString() + ")"); } } if (!VerifyLazy.lazyCompare(typeInfo, complexFieldObj, expectedObject)) { - TestCase.fail("Comparision failed typeInfo " + typeInfo.toString()); + fail("Comparison failed typeInfo " + typeInfo.toString()); } } } @@ -111,7 +114,7 @@ void deserializeAndVerify( TypeInfo typeInfo = typeInfos[i]; verifyRead(deserializeRead, typeInfo, expected); } - TestCase.assertTrue(deserializeRead.isEndOfInputReached()); + assertTrue(deserializeRead.isEndOfInputReached()); } void serializeBatch( @@ -555,21 +558,25 @@ void innerTestVectorDeserializeRow( } } + @Test public void testVectorBinarySortableSerializeRow() throws Throwable { Random r = new Random(8732); testVectorSerializeRow(r, SerializationType.BINARY_SORTABLE); } + @Test public void testVectorLazyBinarySerializeRow() throws Throwable { Random r = new Random(8732); testVectorSerializeRow(r, SerializationType.LAZY_BINARY); } + @Test public void testVectorLazySimpleSerializeRow() throws Throwable { Random r = new Random(8732); testVectorSerializeRow(r, SerializationType.LAZY_SIMPLE); } + @Test public void testVectorBinarySortableDeserializeRow() throws Throwable { Random r = new Random(8732); testVectorDeserializeRow(r, @@ -621,6 +628,7 @@ public void testVectorBinarySortableDeserializeRow() throws Throwable { /* useExternalBuffer */ true); } + @Test public void testVectorLazyBinaryDeserializeRow() throws Throwable { Random r = new Random(8732); testVectorDeserializeRow(r, @@ -636,6 +644,7 @@ public void testVectorLazyBinaryDeserializeRow() throws Throwable { /* useExternalBuffer */ true); } + @Test public void testVectorLazySimpleDeserializeRow() throws Throwable { Random r = new Random(8732); testVectorDeserializeRow(r, @@ -662,4 +671,4 @@ public void testVectorLazySimpleDeserializeRow() throws Throwable { /* alternate2 = unused */ false, /* useExternalBuffer */ true); } -} \ No newline at end of file +} diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestDebugDisplay.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestDebugDisplay.java index 942d1b5950..d7536e9132 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestDebugDisplay.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestDebugDisplay.java @@ -18,13 +18,15 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin; -import junit.framework.TestCase; + +import org.junit.Test; /** * Unit test for the vectorized conversion to and from row object[].
*/ -public class TestDebugDisplay extends TestCase { +public class TestDebugDisplay { + @Test public void testDebugDisplay() throws Throwable { try { @@ -60,4 +62,4 @@ public void testDebugDisplay() throws Throwable { throw e; } } -} \ No newline at end of file +} diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestCombineHiveInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestCombineHiveInputFormat.java index 07cef93ebb..e75412d410 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestCombineHiveInputFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestCombineHiveInputFormat.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.io; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -35,42 +35,44 @@ import java.io.IOException; import java.util.LinkedHashMap; import java.util.Set; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * Unittest for CombineHiveInputFormat. */ -public class TestCombineHiveInputFormat extends TestCase { - public void testAvoidSplitCombination() throws Exception { - Configuration conf = new Configuration(); - JobConf job = new JobConf(conf); +public class TestCombineHiveInputFormat { + @Test + public void testAvoidSplitCombination() throws Exception { + Configuration conf = new Configuration(); + JobConf job = new JobConf(conf); - TableDesc tblDesc = Utilities.defaultTd; - tblDesc.setInputFileFormatClass(TestSkipCombineInputFormat.class); - PartitionDesc partDesc = new PartitionDesc(tblDesc, null); - LinkedHashMap pt = new LinkedHashMap<>(); - pt.put(new Path("/tmp/testfolder1"), partDesc); - pt.put(new Path("/tmp/testfolder2"), partDesc); - MapredWork mrwork = new MapredWork(); - mrwork.getMapWork().setPathToPartitionInfo(pt); - Path mapWorkPath = new Path("/tmp/" + System.getProperty("user.name"), "hive"); - Utilities.setMapRedWork(conf, mrwork, - mapWorkPath); + TableDesc tblDesc = Utilities.defaultTd; + tblDesc.setInputFileFormatClass(TestSkipCombineInputFormat.class); + PartitionDesc partDesc = new PartitionDesc(tblDesc, null); + LinkedHashMap pt = new LinkedHashMap<>(); + pt.put(new Path("/tmp/testfolder1"), partDesc); + pt.put(new Path("/tmp/testfolder2"), partDesc); + MapredWork mrwork = new MapredWork(); + mrwork.getMapWork().setPathToPartitionInfo(pt); + Path mapWorkPath = new Path("/tmp/" + System.getProperty("user.name"), "hive"); + Utilities.setMapRedWork(conf, mrwork, mapWorkPath); - try { - Path[] paths = new Path[2]; - paths[0] = new Path("/tmp/testfolder1"); - paths[1] = new Path("/tmp/testfolder2"); - CombineHiveInputFormat combineInputFormat = - ReflectionUtils.newInstance(CombineHiveInputFormat.class, conf); - combineInputFormat.pathToPartitionInfo = - Utilities.getMapWork(conf).getPathToPartitionInfo(); - Set results = combineInputFormat.getNonCombinablePathIndices(job, paths, 2); - assertEquals("Should have both path indices in the results set", 2, results.size()); - } finally { - // Cleanup the mapwork path - FileSystem.get(conf).delete(mapWorkPath, true); - } + try { + Path[] paths = new Path[2]; + paths[0] = new Path("/tmp/testfolder1"); + paths[1] = new Path("/tmp/testfolder2"); + CombineHiveInputFormat combineInputFormat = + ReflectionUtils.newInstance(CombineHiveInputFormat.class, conf); + combineInputFormat.pathToPartitionInfo = + Utilities.getMapWork(conf).getPathToPartitionInfo(); + Set results = combineInputFormat.getNonCombinablePathIndices(job, paths, 2); + 
assertEquals("Should have both path indices in the results set", 2, results.size()); + } finally { + // Cleanup the mapwork path + FileSystem.get(conf).delete(mapWorkPath, true); } + } public static class TestSkipCombineInputFormat extends FileInputFormat implements CombineHiveInputFormat.AvoidSplitCombination { diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java index 2a47abfab1..b9d801597f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveBinarySearchRecordReader.java @@ -29,7 +29,7 @@ import java.util.LinkedHashMap; import junit.framework.Assert; -import junit.framework.TestCase; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -49,12 +49,13 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordReader; import org.mockito.InOrder; +import org.junit.Test; /** * TestHiveBinarySearchRecordReader. * */ -public class TestHiveBinarySearchRecordReader extends TestCase { +public class TestHiveBinarySearchRecordReader { private RCFileRecordReader rcfReader; private JobConf conf; @@ -152,6 +153,7 @@ private boolean executeDoNext(HiveContextAwareRecordReader hbsReader) throws IOE return hbsReader.next(hbsReader.createKey(), hbsReader.createValue()); } + @Test public void testNonLinearGreaterThan() throws Exception { init(); Assert.assertTrue(executeDoNext(hbsReader)); @@ -165,6 +167,7 @@ public void testNonLinearGreaterThan() throws Exception { verify(rcfReader).sync(25); } + @Test public void testNonLinearLessThan() throws Exception { init(); Assert.assertTrue(executeDoNext(hbsReader)); @@ -178,6 +181,7 @@ public void testNonLinearLessThan() throws Exception { verify(rcfReader).sync(75); } + @Test public void testNonLinearEqualTo() throws Exception { init(); Assert.assertTrue(executeDoNext(hbsReader)); @@ -191,6 +195,7 @@ public void testNonLinearEqualTo() throws Exception { verify(rcfReader).sync(25); } + @Test public void testHitLastBlock() throws Exception { init(); Assert.assertTrue(executeDoNext(hbsReader)); @@ -209,6 +214,7 @@ public void testHitLastBlock() throws Exception { Assert.assertFalse(ioContext.isBinarySearching()); } + @Test public void testHitSamePositionTwice() throws Exception { init(); Assert.assertTrue(executeDoNext(hbsReader)); @@ -225,6 +231,7 @@ public void testHitSamePositionTwice() throws Exception { Assert.assertFalse(ioContext.isBinarySearching()); } + @Test public void testResetRange() throws Exception { init(); InOrder inOrder = inOrder(rcfReader); @@ -247,6 +254,7 @@ public void testResetRange() throws Exception { Assert.assertFalse(ioContext.shouldEndBinarySearch()); } + @Test public void testEqualOpClass() throws Exception { init(); ioContext.setGenericUDFClassName(GenericUDFOPEqual.class.getName()); @@ -261,6 +269,7 @@ public void testEqualOpClass() throws Exception { Assert.assertFalse(executeDoNext(hbsReader)); } + @Test public void testLessThanOpClass() throws Exception { init(); ioContext.setGenericUDFClassName(GenericUDFOPLessThan.class.getName()); @@ -274,6 +283,7 @@ public void testLessThanOpClass() throws Exception { Assert.assertFalse(executeDoNext(hbsReader)); } + @Test public void testLessThanOrEqualOpClass() throws Exception { init(); ioContext.setGenericUDFClassName(GenericUDFOPEqualOrLessThan.class.getName()); @@ -287,6 +297,7 @@ public void testLessThanOrEqualOpClass() throws 
Exception { Assert.assertFalse(executeDoNext(hbsReader)); } + @Test public void testGreaterThanOpClass() throws Exception { init(); ioContext.setGenericUDFClassName(GenericUDFOPGreaterThan.class.getName()); @@ -301,6 +312,7 @@ public void testGreaterThanOpClass() throws Exception { Assert.assertTrue(executeDoNext(hbsReader)); } + @Test public void testGreaterThanOrEqualOpClass() throws Exception { init(); ioContext.setGenericUDFClassName(GenericUDFOPEqualOrGreaterThan.class.getName()); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveFileFormatUtils.java ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveFileFormatUtils.java index db1571cdab..aeeda27210 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveFileFormatUtils.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveFileFormatUtils.java @@ -22,13 +22,19 @@ import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.plan.PartitionDesc; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestHiveFileFormatUtils extends TestCase { +/** + * TestHiveFileFormatUtils. + */ +public class TestHiveFileFormatUtils { + @Test public void testGetPartitionDescFromPathRecursively() throws IOException { PartitionDesc partDesc_3 = new PartitionDesc(); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java index 38d64aa5bc..94c865179b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java @@ -17,21 +17,20 @@ */ package org.apache.hadoop.hive.ql.io; -import static org.junit.Assert.assertArrayEquals; - import java.io.DataOutput; import java.io.IOException; import java.util.Random; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertTrue; import org.junit.Test; -import junit.framework.TestCase; - /** * TestHiveInputOutputBuffer. * */ -public class TestHiveInputOutputBuffer extends TestCase { +public class TestHiveInputOutputBuffer { private static final int numCases = 14; @@ -39,6 +38,7 @@ private static final String asciiLine2 = "Line two"; private static final String asciiString = asciiLine1 + "\n" + asciiLine2 + "\r\n"; + @Test public void testReadAndWrite() throws IOException { String testString = "test_hive_input_output_number_0"; byte[] string_bytes = testString.getBytes(); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 36f2505c61..005d420e5a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -24,7 +24,7 @@ import java.util.LinkedHashMap; import java.util.List; -import junit.framework.TestCase; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,12 +50,17 @@ import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.ReflectionUtils; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * Unittest for SymlinkTextInputFormat. 
*/ @SuppressWarnings("deprecation") -public class TestSymlinkTextInputFormat extends TestCase { +public class TestSymlinkTextInputFormat { private static final Logger log = LoggerFactory.getLogger(TestSymlinkTextInputFormat.class); @@ -69,8 +74,8 @@ private Path dataDir2; private Path symlinkDir; - @Override - protected void setUp() throws IOException { + @Before + public void setUp() throws IOException { conf = new Configuration(); job = new JobConf(conf); @@ -94,8 +99,8 @@ protected void setUp() throws IOException { symlinkDir = new Path(testDir, "symlinkdir"); } - @Override - protected void tearDown() throws IOException { + @After + public void tearDown() throws IOException { fileSystem.delete(testDir, true); } @@ -104,6 +109,7 @@ protected void tearDown() throws IOException { * file, and then create one symlink file containing these 2 files. Normally * without combine, it will return at least 2 splits */ + @Test public void testCombine() throws Exception { JobConf newJob = new JobConf(job); FileSystem fs = dataDir1.getFileSystem(newJob); @@ -205,6 +211,7 @@ public void testCombine() throws Exception { * Test scenario: Two data directories, one symlink file that contains two * paths each point to a file in one of data directories. */ + @Test public void testAccuracy1() throws IOException { // First data dir, contains 2 files. @@ -286,6 +293,7 @@ public void testAccuracy1() throws IOException { * * Expected: Should return empty result set without any exception. */ + @Test public void testAccuracy2() throws IOException { fileSystem.mkdirs(symlinkDir); @@ -326,6 +334,7 @@ public void testAccuracy2() throws IOException { * Scenario: No job input paths. * Expected: IOException with proper message. */ + @Test public void testFailure() { SymlinkTextInputFormat inputFormat = new SymlinkTextInputFormat(); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java index 06f27b5091..d17222eb6b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java @@ -15,7 +15,7 @@ import java.util.Properties; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe; @@ -35,9 +35,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestParquetSerDe extends TestCase { +/** + * TestParquetSerDe. 
+ */ +public class TestParquetSerDe { + @Test public void testParquetHiveSerDe() throws Throwable { try { // Create the SerDe @@ -85,6 +92,7 @@ public void testParquetHiveSerDe() throws Throwable { } } + @Test public void testParquetHiveSerDeComplexTypes() throws Throwable { // Initialize ParquetHiveSerDe serDe = new ParquetHiveSerDe(); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java index 586284f04f..1f0657178a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java @@ -16,16 +16,23 @@ import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import org.junit.Before; import org.junit.Test; -public class TestAbstractParquetMapInspector extends TestCase { +/** + * AbstractParquetMapInspector Test. + */ +public class TestAbstractParquetMapInspector { class TestableAbstractParquetMapInspector extends AbstractParquetMapInspector { @@ -40,7 +47,7 @@ public Object getMapValueElement(Object o, Object o1) { } private TestableAbstractParquetMapInspector inspector; - @Override + @Before public void setUp() { inspector = new TestableAbstractParquetMapInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.javaIntObjectInspector); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java index d0756e482a..b788541d19 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java @@ -16,7 +16,7 @@ import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.io.parquet.serde.primitive.ParquetPrimitiveInspectorFactory; import org.apache.hadoop.hive.serde2.io.ShortWritable; @@ -24,13 +24,19 @@ import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import org.junit.Before; import org.junit.Test; -public class TestDeepParquetHiveMapInspector extends TestCase { +/** + * DeepParquetHiveMapInspector Test. 
+ */ +public class TestDeepParquetHiveMapInspector { private DeepParquetHiveMapInspector inspector; - @Override + @Before public void setUp() { inspector = new DeepParquetHiveMapInspector(ParquetPrimitiveInspectorFactory.parquetShortInspector, PrimitiveObjectInspectorFactory.javaIntObjectInspector); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java index 1b3de041e2..8fd44f64c0 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java @@ -16,19 +16,26 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import org.junit.Before; import org.junit.Test; -public class TestParquetHiveArrayInspector extends TestCase { +/** + * TestParquetHiveArrayInspector. + */ +public class TestParquetHiveArrayInspector { private ParquetHiveArrayInspector inspector; - @Override + @Before public void setUp() { inspector = new ParquetHiveArrayInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector); } diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java index 477825e3f4..6a29e61964 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java @@ -23,15 +23,17 @@ import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; import junit.framework.Assert; -import junit.framework.TestCase; + +import org.junit.Test; /** * Tests util-libraries used for parquet-timestamp. 
*/ -public class TestParquetTimestampUtils extends TestCase { +public class TestParquetTimestampUtils { + @Test public void testJulianDay() { //check if May 23, 1968 is Julian Day 2440000 Calendar cal = Calendar.getInstance(); @@ -106,6 +108,7 @@ public void testJulianDay() { Assert.assertEquals(nt2.getJulianDay() - nt1.getJulianDay(), 1464305); } + @Test public void testNanos() { //case 1: 01:01:01.0000000001 Calendar cal = Calendar.getInstance(); @@ -169,6 +172,7 @@ public void testNanos() { Assert.assertEquals(ts1, NanoTimeUtils.getTimestamp(n3, false)); } + @Test public void testTimezone() { Calendar cal = Calendar.getInstance(); cal.set(Calendar.YEAR, 1968); @@ -195,14 +199,17 @@ public void testTimezone() { Assert.assertEquals(nt.getJulianDay(), 2440001); } + @Test public void testTimezoneValues() { valueTest(false); } + @Test public void testTimezonelessValues() { valueTest(true); } + @Test public void testTimezoneless() { Timestamp ts1 = Timestamp.valueOf("2011-01-01 00:30:30.111111111"); NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, true); diff --git ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java index 583b654131..37383eca06 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java @@ -16,20 +16,26 @@ import java.util.HashMap; import java.util.Map; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import org.junit.Before; import org.junit.Test; -public class TestStandardParquetHiveMapInspector extends TestCase { +/** + * TestStandardParquetHiveMapInspector. + */ +public class TestStandardParquetHiveMapInspector { private StandardParquetHiveMapInspector inspector; - @Override + @Before public void setUp() { inspector = new StandardParquetHiveMapInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.javaIntObjectInspector); diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java index 24eb237531..4a88551871 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java @@ -18,17 +18,23 @@ package org.apache.hadoop.hive.ql.lockmgr; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.junit.Assert; +import org.junit.Test; -public class TestEmbeddedLockManager extends TestCase { +/** + * TestEmbeddedLockManager. 
+ * + */ +public class TestEmbeddedLockManager { private int counter; private HiveConf conf = new HiveConf(); + @Test public void testLocking() throws LockException { HiveConf conf = new HiveConf(); conf.set("hive.lock.numretries", "0"); diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 8d55fecbf9..230fac3ec1 100755 --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -71,19 +71,28 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import junit.framework.TestCase; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * TestHive. * */ -public class TestHive extends TestCase { +public class TestHive { protected Hive hm; protected HiveConf hiveConf; - @Override - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { + hiveConf = new HiveConf(this.getClass()); hm = setUpImpl(hiveConf); } @@ -104,10 +113,10 @@ private static Hive setUpImpl(HiveConf hiveConf) throws Exception { } } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { try { - super.tearDown(); + // disable trash hiveConf.setFloat("fs.trash.checkpoint.interval", 30); // FS_TRASH_CHECKPOINT_INTERVAL_KEY (hadoop-2) hiveConf.setFloat("fs.trash.interval", 30); // FS_TRASH_INTERVAL_KEY (hadoop-2) @@ -121,6 +130,7 @@ protected void tearDown() throws Exception { } } + @Test public void testTable() throws Throwable { try { // create a simple table and test create, drop, get @@ -220,6 +230,7 @@ private void setNullCreateTableGrants() { * * @throws Throwable */ + @Test public void testThriftTable() throws Throwable { String tableName = "table_for_test_thrifttable"; try { @@ -263,6 +274,7 @@ public void testThriftTable() throws Throwable { * * @throws Throwable */ + @Test public void testMetaStoreApiTiming() throws Throwable { // Get the RootLogger which, if you don't have log4j2-test.properties defined, will only log ERRORs Logger logger = LogManager.getLogger("hive.ql.metadata.Hive"); @@ -372,6 +384,7 @@ private static Table createTestTable(String dbName, String tableName) throws Hiv * Test basic Hive class interaction, that: * - We can have different Hive objects throughout the lifetime of this thread. 
*/ + @Test public void testHiveCloseCurrent() throws Throwable { Hive hive1 = Hive.get(); Hive.closeCurrent(); @@ -380,6 +393,7 @@ public void testHiveCloseCurrent() throws Throwable { assertTrue(hive1 != hive2); } + @Test public void testGetAndDropTables() throws Throwable { try { String dbName = "db_for_testgettables"; @@ -432,6 +446,7 @@ public void testGetAndDropTables() throws Throwable { } } + @Test public void testWmNamespaceHandling() throws Throwable { HiveConf hiveConf = new HiveConf(this.getClass()); Hive hm = setUpImpl(hiveConf); @@ -478,6 +493,7 @@ public void run() { hm2.alterResourcePlan("hm", changes, true, false, false); } + @Test public void testDropTableTrash() throws Throwable { if (!ShimLoader.getHadoopShims().supportTrashFeature()) { return; // it's hadoop-1 @@ -590,6 +606,7 @@ private void cleanUpTableQuietly(String dbName, String tableName) { * 2. Drop partitions with PURGE, and check that the data is moved to Trash. * @throws Exception on failure. */ + @Test public void testDropPartitionsWithPurge() throws Exception { String dbName = Warehouse.DEFAULT_DATABASE_NAME; String tableName = "table_for_testDropPartitionsWithPurge"; @@ -652,6 +669,7 @@ public void testDropPartitionsWithPurge() throws Exception { * Test that tables set up with auto-purge skip trash-directory when tables/partitions are dropped. * @throws Throwable */ + @Test public void testAutoPurgeTablesAndPartitions() throws Throwable { String dbName = Warehouse.DEFAULT_DATABASE_NAME; @@ -704,6 +722,7 @@ public void testAutoPurgeTablesAndPartitions() throws Throwable { } } + @Test public void testPartition() throws Throwable { try { String tableName = "table_for_testpartition"; @@ -752,6 +771,7 @@ public void testPartition() throws Throwable { } } + @Test public void testHiveRefreshOnConfChange() throws Throwable{ Hive prevHiveObj = Hive.get(); prevHiveObj.getDatabaseCurrent(); diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java index eccca48174..df3b2a5caf 100644 --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java +++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveRemote.java @@ -25,6 +25,8 @@ import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; import org.apache.hadoop.util.StringUtils; +import org.junit.Before; + /** * * TestHiveRemote. @@ -36,8 +38,8 @@ /** * Start a remote metastore and initialize a Hive object pointing at it. 
*/ - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { super.setUp(); hiveConf = new HiveConf(this.getClass()); hiveConf diff --git ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java index f0586c28be..94a7d96a7b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java +++ ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestDynamicMultiDimeCollection.java @@ -21,7 +21,7 @@ import java.util.Arrays; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.parse.SemanticException; import org.junit.Assert; @@ -32,7 +32,7 @@ * Test {@link DynamicMultiDimeContainer} * */ -public class TestDynamicMultiDimeCollection extends TestCase { +public class TestDynamicMultiDimeCollection { private static String DEF_DIR = "default"; @Test diff --git ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java index af219dff5e..ad93b4d57c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java +++ ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/TestListBucketingPrunner.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.optimizer.listbucketingpruner; import junit.framework.Assert; -import junit.framework.TestCase; + import org.junit.Test; @@ -27,7 +27,7 @@ * Test {@link ListBucketingPruner} * */ -public class TestListBucketingPrunner extends TestCase { +public class TestListBucketingPrunner { @Test public void testSkipSkewedDirectory1() { diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java index d4ae06006c..e17ee2e6be 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java @@ -18,22 +18,27 @@ package org.apache.hadoop.hive.ql.parse; -import junit.framework.TestCase; + +import static org.junit.Assert.fail; +import org.junit.Before; +import org.junit.After; +import org.junit.Test; /** * TestEximUtil. 
* */ -public class TestEximUtil extends TestCase { +public class TestEximUtil { - @Override - protected void setUp() { + @Before + public void setUp() { } - @Override - protected void tearDown() { + @After + public void tearDown() { } + @Test public void testCheckCompatibility() throws SemanticException { // backward/forward compatible diff --git ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java index dae98642da..e7be46c217 100644 --- ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java +++ ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java @@ -20,7 +20,7 @@ import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.CollectOperator; @@ -45,7 +45,7 @@ */ @SuppressWarnings("deprecation") @Ignore -public abstract class BaseScalarUdfTest extends TestCase { +public abstract class BaseScalarUdfTest { /** * The data from this method will be fed through the @@ -92,4 +92,4 @@ public final void testUdf() throws HiveException { OperatorTestUtils.assertResults(op, cdop, data, getExpectedResult()); } -} \ No newline at end of file +} diff --git ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java index 4cf72dd637..5b17e8addb 100644 --- ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java +++ ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java @@ -20,15 +20,17 @@ import java.util.TreeSet; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.tools.LineageInfo; +import static org.junit.Assert.fail; +import org.junit.Test; /** * TestLineageInfo. * */ -public class TestLineageInfo extends TestCase { +public class TestLineageInfo { /** * Checks whether the test outputs match the expected outputs. @@ -50,6 +52,7 @@ private void checkOutput(LineageInfo lep, TreeSet i, TreeSet o) } } + @Test public void testSimpleQuery() { LineageInfo lep = new LineageInfo(); try { @@ -67,6 +70,7 @@ public void testSimpleQuery() { } } + @Test public void testSimpleQuery2() { LineageInfo lep = new LineageInfo(); try { @@ -84,6 +88,7 @@ public void testSimpleQuery2() { } } + @Test public void testSimpleQuery3() { LineageInfo lep = new LineageInfo(); try { @@ -102,6 +107,7 @@ public void testSimpleQuery3() { } } + @Test public void testSimpleQuery4() { LineageInfo lep = new LineageInfo(); try { @@ -117,6 +123,7 @@ public void testSimpleQuery4() { } } + @Test public void testSimpleQuery5() { LineageInfo lep = new LineageInfo(); try { diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestToInteger.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestToInteger.java index 3549f21d8e..e4eb423286 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestToInteger.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestToInteger.java @@ -18,13 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import org.junit.Test; -public class TestToInteger extends TestCase{ +/** + * TestToInteger. 
+ */ +public class TestToInteger { @Test public void testTextToInteger() throws Exception{ diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBase64.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBase64.java index 7aed20373f..6a59927c3c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBase64.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBase64.java @@ -18,12 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFBase64 extends TestCase { +/** + * TestUDFBase64. + */ +public class TestUDFBase64 { + @Test public void testBase64Conversion(){ byte[] bytes = "string".getBytes(); // Let's make sure we only read the relevant part of the writable in case of reuse diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBuildVersion.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBuildVersion.java index d54afbdccb..cdc3bafb40 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBuildVersion.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFBuildVersion.java @@ -21,12 +21,15 @@ import org.apache.hadoop.io.Text; import org.apache.hive.common.util.HiveVersionInfo; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * Unit Test Case for UDFBuildVersion. */ -public class TestUDFBuildVersion extends TestCase { +public class TestUDFBuildVersion { + @Test public void testVersion() { UDFBuildVersion udf = new UDFBuildVersion(); Text result = udf.evaluate(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFCrc32.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFCrc32.java index f24a0791ae..4c1e244b3a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFCrc32.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFCrc32.java @@ -17,15 +17,23 @@ */ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFCrc32 extends TestCase { +/** + * TestUDFCrc32.
+ */ +public class TestUDFCrc32 { + @Test public void testCrc32Str() throws HiveException { UDFCrc32 udf = new UDFCrc32(); @@ -38,6 +46,7 @@ public void testCrc32Str() throws HiveException { runAndVerifyStr(null, null, udf); } + @Test public void testCrc32Bin() throws HiveException { UDFCrc32 udf = new UDFCrc32(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java index 4770ab7868..75bf8655ed 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java @@ -24,11 +24,14 @@ import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; -import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFDateFormatGranularity extends TestCase { +/** + * TestUDFDateFormatGranularity. + */ +public class TestUDFDateFormatGranularity { // Timestamp values are PST (timezone for tests is set to PST by default) diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFHex.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFHex.java index 37ad235095..f5e1fb300b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFHex.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFHex.java @@ -18,12 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFHex extends TestCase { +/** + * TestUDFHex. + */ +public class TestUDFHex { + @Test public void testHexConversion(){ byte[] bytes = "string".getBytes(); // Let's make sure we only read the relevant part of the writable in case of reuse diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMd5.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMd5.java index d624617422..c9ca4dc0c1 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMd5.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMd5.java @@ -17,14 +17,20 @@ */ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFMd5 extends TestCase { +/** + * TestUDFMd5. 
+ */ +public class TestUDFMd5 { + @Test public void testMD5Str() throws HiveException { UDFMd5 udf = new UDFMd5(); @@ -34,6 +40,7 @@ public void testMD5Str() throws HiveException { runAndVerifyStr(null, null, udf); } + @Test public void testMD5Bin() throws HiveException { UDFMd5 udf = new UDFMd5(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFSha1.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFSha1.java index 3dbf30d362..4c3c399304 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFSha1.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFSha1.java @@ -17,14 +17,20 @@ */ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFSha1 extends TestCase { +/** + * TestUDFSha1. + */ +public class TestUDFSha1 { + @Test public void testSha1Str() throws HiveException { UDFSha1 udf = new UDFSha1(); @@ -34,6 +40,7 @@ public void testSha1Str() throws HiveException { runAndVerifyStr(null, null, udf); } + @Test public void testSha1Bin() throws HiveException { UDFSha1 udf = new UDFSha1(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUUID.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUUID.java index 843f7bda6e..64390cc7dd 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUUID.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUUID.java @@ -18,13 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertEquals; import org.junit.Test; -public class TestUDFUUID extends TestCase { +/** + * TestUDFUUID. + */ +public class TestUDFUUID { @Test public void testUUID() throws Exception { UDFUUID udf = new UDFUUID(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnbase64.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnbase64.java index d8940869b6..5103ed06dd 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnbase64.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnbase64.java @@ -18,12 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFUnbase64 extends TestCase { +/** + * TestUDFUnbase64. + */ +public class TestUDFUnbase64 { + @Test public void testUnbase64Conversion(){ Text base64 = new Text(); // Let's make sure we only read the relevant part of the writable in case of reuse diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnhex.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnhex.java index a77ff9b970..3cf665472c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnhex.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFUnhex.java @@ -18,11 +18,17 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFUnhex extends TestCase { +/** + * TestUDFUnhex. 
+ */ +public class TestUDFUnhex { + @Test public void testUnhexConversion(){ Text hex = new Text(); // Let's make sure we only read the relevant part of the writable in case of reuse diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFVersion.java ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFVersion.java index f9dd1ba99a..3825115edc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFVersion.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFVersion.java @@ -18,12 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import junit.framework.TestCase; + import org.apache.hadoop.io.Text; import org.apache.hive.common.util.HiveVersionInfo; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestUDFVersion extends TestCase { +/** + * TestUDFVersion. + */ +public class TestUDFVersion { + @Test public void testVersion(){ UDFVersion udf = new UDFVersion(); Text result = udf.evaluate(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java index 1f7ed4aaa7..66b069b409 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFCorrelation.java @@ -18,15 +18,22 @@ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDAFCorrelation extends TestCase { +/** + * TestGenericUDAFCorrelation. + * + */ +public class TestGenericUDAFCorrelation { + @Test public void testCorr() throws HiveException { GenericUDAFCorrelation corr = new GenericUDAFCorrelation(); GenericUDAFEvaluator eval1 = corr.getEvaluator( diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java index 0bcf3f2171..a0189a5c2a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAbs.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -35,9 +35,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestGenericUDFAbs extends TestCase { +/** + * TestGenericUDFAbs. 
+ */ +public class TestGenericUDFAbs { + @Test public void testInt() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector; @@ -57,6 +64,7 @@ public void testInt() throws HiveException { assertEquals("abs() test for INT failed ", 107, output.get()); } + @Test public void testLong() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector; @@ -76,6 +84,7 @@ public void testLong() throws HiveException { assertEquals("abs() test for LONG failed ", 107, output.get()); } + @Test public void testDouble() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; @@ -86,15 +95,16 @@ public void testDouble() throws HiveException { DeferredObject[] args = {valueObj}; DoubleWritable output = (DoubleWritable) udf.evaluate(args); - assertEquals("abs() test for Double failed ", 107.78, output.get()); + assertEquals("abs() test for Double failed ", 107.78, output.get(), 1e-15); valueObj = new DeferredJavaObject(new DoubleWritable(-107.78)); args[0] = valueObj; output = (DoubleWritable) udf.evaluate(args); - assertEquals("abs() test for Double failed ", 107.78, output.get()); + assertEquals("abs() test for Double failed ", 107.78, output.get(), 1e-15); } + @Test public void testFloat() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableFloatObjectInspector; @@ -116,6 +126,7 @@ public void testFloat() throws HiveException { } + @Test public void testText() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -141,6 +152,7 @@ public void testText() throws HiveException { assertEquals("abs() test for String failed ", null, output); } + @Test public void testHiveDecimal() throws HiveException { GenericUDFAbs udf = new GenericUDFAbs(); int prec = 12; @@ -160,14 +172,14 @@ public void testHiveDecimal() throws HiveException { HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args); assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal() - .doubleValue()); + .doubleValue(), 1e-15); valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789"))); args[0] = valueObj; output = (HiveDecimalWritable) udf.evaluate(args); assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal() - .doubleValue()); + .doubleValue(), 1e-15); // null input args[0] = new DeferredJavaObject(null); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java index 7c2ee15646..eb104bdb0c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -32,14 +32,22 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.fail; 
+import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFAddMonths extends TestCase { +/** + * TestGenericUDFAddMonths. + */ +public class TestGenericUDFAddMonths { private final Text fmtTextWithTime = new Text("YYYY-MM-dd HH:mm:ss"); private final Text fmtTextWithTimeAndms = new Text("YYYY-MM-dd HH:mm:ss.SSS"); private final Text fmtTextWithoutTime = new Text("YYYY-MM-dd"); private final Text fmtTextInvalid = new Text("YYYY-abcdz"); + @Test public void testAddMonthsInt() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -72,6 +80,7 @@ public void testAddMonthsInt() throws HiveException { runAndVerify("2016-02-29 10:30:00", -1, fmtTextWithoutTime, "2016-01-31", udf); } + @Test public void testAddMonthsStringWithTime() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -87,6 +96,7 @@ public void testAddMonthsStringWithTime() throws HiveException { runAndVerify("2017-12-31 14:15:16.001", 2, fmtTextWithTime, "2018-02-28 14:15:16", udf); } + @Test public void testAddMonthsInvalidFormatter() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -103,6 +113,7 @@ public void testAddMonthsInvalidFormatter() throws HiveException { //test success if exception caught } } + @Test public void testAddMonthsStringWithTimeWithms() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -121,6 +132,7 @@ public void testAddMonthsStringWithTimeWithms() throws HiveException { runAndVerify("2017-12-31 14:15:16", 2, fmtTextWithTimeAndms, "2018-02-28 14:15:16.000", udf); } + @Test public void testAddMonthsWithNullFormatter() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -136,6 +148,7 @@ public void testAddMonthsWithNullFormatter() throws HiveException { runAndVerify("2017-12-31", 2, null, "2018-02-28", udf); } + @Test public void testAddMonthsTimestamp() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -150,6 +163,7 @@ public void testAddMonthsTimestamp() throws HiveException { runAndVerify(Timestamp.valueOf("2017-12-31 14:15:16"), 2, fmtTextWithTime, "2018-02-28 14:15:16", udf); } + @Test public void testWrongDateStr() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -162,6 +176,7 @@ public void testWrongDateStr() throws HiveException { runAndVerify("2014-01", 1, null, udf); } + @Test public void testWrongTsStr() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -176,6 +191,7 @@ public void testWrongTsStr() throws HiveException { runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf); } + @Test public void testAddMonthsShort() throws HiveException { GenericUDFAddMonths udf = new 
GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -187,6 +203,7 @@ public void testAddMonthsShort() throws HiveException { runAndVerify("2014-01-14", (short) 1, "2014-02-14", udf); } + @Test public void testAddMonthsByte() throws HiveException { GenericUDFAddMonths udf = new GenericUDFAddMonths(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -198,6 +215,7 @@ public void testAddMonthsByte() throws HiveException { runAndVerify("2014-01-14", (byte) 1, "2014-02-14", udf); } + @Test public void testAddMonthsLong() throws HiveException { @SuppressWarnings("resource") GenericUDFAddMonths udf = new GenericUDFAddMonths(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCbrt.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCbrt.java index 8105220272..6de3789465 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCbrt.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCbrt.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -25,9 +25,17 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFCbrt extends TestCase { +/** + * TestGenericUDFCbrt. + */ +public class TestGenericUDFCbrt { + @Test public void testCbrt() throws HiveException { GenericUDFCbrt udf = new GenericUDFCbrt(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java index dcb4d9c53b..728e49bca7 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java @@ -32,9 +32,17 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFDate extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; +import org.junit.Test; + +/** + * TestGenericUDFDate. 
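The UDF tests in this patch all follow the two-phase protocol visible above: initialize once with ObjectInspectors describing the argument types, then evaluate per row with DeferredObjects wrapping writable values. A self-contained sketch against GenericUDFCbrt, with the input value and tolerance chosen here only for illustration:

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCbrt;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.junit.Test;

    public class CbrtProtocolSketch {
      @Test
      public void testCubeRoot() throws HiveException {
        GenericUDFCbrt udf = new GenericUDFCbrt();
        // Phase 1: declare the argument type, once per plan.
        ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.writableDoubleObjectInspector };
        udf.initialize(argOIs);
        // Phase 2: evaluate row by row with lazily resolved values.
        DeferredObject[] args = { new DeferredJavaObject(new DoubleWritable(27.0)) };
        DoubleWritable output = (DoubleWritable) udf.evaluate(args);
        assertEquals(3.0, output.get(), 1e-9);
      }
    }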
+ */ +public class TestGenericUDFDate { + @Test public void testStringToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -53,6 +61,7 @@ public void testStringToDate() throws HiveException { assertNull("to_date() with null STRING", output); } + @Test public void testTimestampToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -72,6 +81,7 @@ public void testTimestampToDate() throws HiveException { assertNull("to_date() with null TIMESTAMP", output); } + @Test public void testDateWritablepToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -90,6 +100,7 @@ public void testDateWritablepToDate() throws HiveException { assertNull("to_date() with null DATE", output); } + @Test public void testVoidToDate() throws HiveException { GenericUDFDate udf = new GenericUDFDate(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableVoidObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java index 8b45a627ad..a03a49e635 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java @@ -31,9 +31,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFDateAdd extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; + +/** + * TestGenericUDFDateAdd. 
+ */ +public class TestGenericUDFDateAdd { + @Test public void testStringToDate() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -59,6 +66,7 @@ public void testStringToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testTimestampToDate() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -85,6 +93,7 @@ public void testTimestampToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testDateWritablepToDate() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -111,6 +120,7 @@ public void testDateWritablepToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testByteDataTypeAsDays() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -126,6 +136,7 @@ public void testByteDataTypeAsDays() throws HiveException { assertEquals("date_add() test for BYTE failed ", "0109-06-24", output.toString()); } + @Test public void testShortDataTypeAsDays() throws HiveException { GenericUDFDateAdd udf = new GenericUDFDateAdd(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java index ea183d460b..86b914dcef 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java @@ -32,9 +32,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFDateDiff extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; + +/** + * TestGenericUDFDateDiff. 
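The assertNull checks above encode a convention shared across these UDFs: a null in either argument produces a null result rather than an exception. The contract in isolation, using the same inspector factory as the tests (the choice of java vs writable inspectors here is illustrative):

    import static org.junit.Assert.assertNull;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.junit.Test;

    public class NullPropagationSketch {
      @Test
      public void testNullInNullOut() throws HiveException {
        GenericUDFDateAdd udf = new GenericUDFDateAdd();
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector };
        udf.initialize(argOIs);
        // Both arguments null: the UDF is expected to return null, not throw.
        DeferredObject[] args = { new DeferredJavaObject(null), new DeferredJavaObject(null) };
        assertNull(udf.evaluate(args));
      }
    }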
+ */ +public class TestGenericUDFDateDiff { + @Test public void testStringToDate() throws HiveException { GenericUDFDateDiff udf = new GenericUDFDateDiff(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -60,6 +67,7 @@ public void testStringToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testTimestampToDate() throws HiveException { GenericUDFDateDiff udf = new GenericUDFDateDiff(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -87,6 +95,7 @@ public void testTimestampToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testDateWritablepToDate() throws HiveException { GenericUDFDateDiff udf = new GenericUDFDateDiff(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java index 6a3cdda48a..f0a5d3f19e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Timestamp; @@ -30,9 +30,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFDateFormat extends TestCase { +/** + * TestGenericUDFDateFormat. 
+ */ +public class TestGenericUDFDateFormat { + @Test public void testDateFormatStr() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -64,6 +70,7 @@ public void testDateFormatStr() throws HiveException { runAndVerifyStr("2015-04-12 10", fmtText, "Sunday", udf); } + @Test public void testWrongDateStr() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -79,6 +86,7 @@ public void testWrongDateStr() throws HiveException { runAndVerifyStr(null, fmtText, null, udf); } + @Test public void testDateFormatDate() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -99,6 +107,7 @@ public void testDateFormatDate() throws HiveException { runAndVerifyDate("2015-04-12", fmtText, "Sunday", udf); } + @Test public void testDateFormatTs() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -120,6 +129,7 @@ public void testDateFormatTs() throws HiveException { runAndVerifyTs("2015-04-12 10:30:45", fmtText, "Sunday", udf); } + @Test public void testNullFmt() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -133,6 +143,7 @@ public void testNullFmt() throws HiveException { runAndVerifyStr("2015-04-05", fmtText, null, udf); } + @Test public void testWrongFmt() throws HiveException { GenericUDFDateFormat udf = new GenericUDFDateFormat(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java index b70a1dc6c3..8c86eb5f8c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java @@ -31,9 +31,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFDateSub extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; + +/** + * TestGenericUDFDateSub. 
+ */ +public class TestGenericUDFDateSub { + @Test public void testStringToDate() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -59,6 +66,7 @@ public void testStringToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testTimestampToDate() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -85,6 +93,7 @@ public void testTimestampToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testDateWritablepToDate() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -111,6 +120,7 @@ public void testDateWritablepToDate() throws HiveException { assertNull("date_add() both args null", udf.evaluate(args)); } + @Test public void testByteDataTypeAsDays() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -126,6 +136,7 @@ public void testByteDataTypeAsDays() throws HiveException { assertEquals("date_add() test for BYTE failed ", "0109-06-16", output.toString()); } + @Test public void testShortDataTypeAsDays() throws HiveException { GenericUDFDateSub udf = new GenericUDFDateSub(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java index 1efbe059a8..e02d2ffbf9 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDecode.java @@ -20,15 +20,21 @@ import java.io.UnsupportedEncodingException; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFDecode extends TestCase { +/** + * TestGenericUDFDecode. 
+ */ +public class TestGenericUDFDecode { + @Test public void testDecode() throws UnsupportedEncodingException, HiveException { String[] charsetNames = {"US-ASCII", "ISO-8859-1", "UTF-8", "UTF-16BE", "UTF-16LE", "UTF-16"}; for (String charsetName : charsetNames){ diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java index d7884f7fc3..cfead4375f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEncode.java @@ -20,7 +20,7 @@ import java.io.UnsupportedEncodingException; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -28,8 +28,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.BytesWritable; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFEncode extends TestCase { +/** + * TestGenericUDFEncode. + */ +public class TestGenericUDFEncode { + @Test public void testEncode() throws UnsupportedEncodingException, HiveException{ String[] charsetNames = {"US-ASCII", "ISO-8859-1", "UTF-8", "UTF-16BE", "UTF-16LE", "UTF-16"}; for (String charsetName : charsetNames){ diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEnforceConstraint.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEnforceConstraint.java index a0da723e41..0509da703e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEnforceConstraint.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFEnforceConstraint.java @@ -18,18 +18,22 @@ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.BooleanWritable; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * Test class for {@link GenericUDFEnforceConstraint}. 
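testDecode and testEncode loop over the same six charset names; the property they exercise is the encode/decode round trip. The same property stated in plain Java (sample string chosen arbitrarily, ASCII-safe so every charset above can represent it):

    import static org.junit.Assert.assertEquals;

    import java.nio.charset.Charset;

    import org.junit.Test;

    public class CharsetRoundTripSketch {
      @Test
      public void testRoundTrip() {
        String[] charsetNames = {"US-ASCII", "ISO-8859-1", "UTF-8", "UTF-16BE", "UTF-16LE", "UTF-16"};
        for (String name : charsetNames) {
          Charset cs = Charset.forName(name);
          String original = "Hello Hive";
          byte[] encoded = original.getBytes(cs);          // what encode(str, charset) produces
          assertEquals(original, new String(encoded, cs)); // what decode(bin, charset) recovers
        }
      }
    }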
*/ -public class TestGenericUDFEnforceConstraint extends TestCase { +public class TestGenericUDFEnforceConstraint { + @Test public void testNull() throws HiveException { try { GenericUDFEnforceConstraint udf = new GenericUDFEnforceConstraint(); @@ -47,6 +51,7 @@ public void testNull() throws HiveException { } } + @Test public void testInvalidArgumentsLength() throws HiveException { try { GenericUDFEnforceConstraint udf = new GenericUDFEnforceConstraint(); @@ -61,6 +66,7 @@ public void testInvalidArgumentsLength() throws HiveException { } } + @Test public void testCorrect() throws HiveException { GenericUDFEnforceConstraint udf = new GenericUDFEnforceConstraint(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFactorial.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFactorial.java index 7378076189..fd31cbcb14 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFactorial.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFactorial.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -27,9 +27,18 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFFactorial extends TestCase { +/** + * TestGenericUDFFactorial. + */ +public class TestGenericUDFFactorial { + @Test public void testFactorial() throws HiveException { GenericUDFFactorial udf = new GenericUDFFactorial(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableIntObjectInspector; @@ -48,6 +57,7 @@ public void testFactorial() throws HiveException { runAndVerify(null, null, udf); } + @Test public void testWrongInputType() throws HiveException { @SuppressWarnings("resource") GenericUDFFactorial udf = new GenericUDFFactorial(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java index bb9918cb41..59c1c49a27 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java @@ -26,9 +26,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFFromUtcTimestamp extends TestCase { +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; + +/** + * TestGenericUDFFromUtcTimestamp. 
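The negative tests in this patch keep the JUnit 3 try/fail/catch idiom. JUnit 4 can state the same intent declaratively via the annotation's expected attribute; a sketch against GenericUDFFactorial, assuming (as testWrongInputType indicates) that a STRING argument is rejected at initialize time:

    import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFactorial;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.junit.Test;

    public class ExpectedExceptionSketch {
      @Test(expected = UDFArgumentTypeException.class)
      public void testStringArgumentRejected() throws HiveException {
        GenericUDFFactorial udf = new GenericUDFFactorial();
        ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
        udf.initialize(argOIs); // should throw: factorial is defined on integer types only
      }
    }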
+ */ +public class TestGenericUDFFromUtcTimestamp { public static void runAndVerify(GenericUDF udf, Object arg1, Object arg2, Object expected) throws HiveException { DeferredObject[] args = { new DeferredJavaObject(arg1), new DeferredJavaObject(arg2) }; @@ -41,6 +47,7 @@ public static void runAndVerify(GenericUDF udf, } } + @Test public void testFromUtcTimestamp() throws Exception { ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; GenericUDFFromUtcTimestamp udf = new GenericUDFFromUtcTimestamp(); @@ -63,6 +70,7 @@ public void testFromUtcTimestamp() throws Exception { Timestamp.valueOf("2015-03-28 18:00:00.123456789")); } + @Test public void testToUtcTimestamp() throws Exception { ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; GenericUDFToUtcTimestamp udf = new GenericUDFToUtcTimestamp(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java index 9787454371..dfd22783b4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -32,9 +32,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFGreatest extends TestCase { +/** + * TestGenericUDFGreatest. 
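testFromUtcTimestamp and testToUtcTimestamp pivot on rendering one instant in another zone's wall clock. The underlying java.time arithmetic, with the target zone picked here only for illustration:

    import static org.junit.Assert.assertEquals;

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    import org.junit.Test;

    public class UtcShiftSketch {
      @Test
      public void testUtcToZoneShift() {
        LocalDateTime utc = LocalDateTime.parse("2015-03-28T17:00:00");
        // from_utc_timestamp semantics: same instant, rendered in the target zone.
        LocalDateTime shifted = utc.atZone(ZoneOffset.UTC)
            .withZoneSameInstant(ZoneId.of("Asia/Kolkata"))
            .toLocalDateTime();
        assertEquals("2015-03-28T22:30", shifted.toString());
      }
    }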
+ */ +public class TestGenericUDFGreatest { + @Test public void testOneArg() throws HiveException { @SuppressWarnings("resource") GenericUDFGreatest udf = new GenericUDFGreatest(); @@ -50,6 +57,7 @@ public void testOneArg() throws HiveException { assertNotNull("greatest() test ", ex); } + @Test public void testVoids() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableVoidObjectInspector; @@ -60,6 +68,7 @@ public void testVoids() throws HiveException { runAndVerify(new Object[] { null, 1, "test"}, null, udf); } + @Test public void testGreatestMixed() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector; @@ -72,6 +81,7 @@ public void testGreatestMixed() throws HiveException { } + @Test public void testGreatestStr() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -100,6 +110,7 @@ public void testGreatestStr() throws HiveException { runAndVerify(new String[] { null, null, null }, null, udf); } + @Test public void testGreatestInt() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -122,6 +133,7 @@ public void testGreatestInt() throws HiveException { runAndVerify(new Integer[] { null, null, null }, null, udf); } + @Test public void testGreatestDouble() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -144,6 +156,7 @@ public void testGreatestDouble() throws HiveException { runAndVerify(new Double[] { null, null, null }, null, udf); } + @Test public void testGreatestDate() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -166,6 +179,7 @@ public void testGreatestDate() throws HiveException { runAndVerify(new Date[] { null, null, null }, null, udf); } + @Test public void testGreatestIntTypes() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector[] arguments = new ObjectInspector[4]; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFInitCap.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFInitCap.java index c871e8d7da..30907248e7 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFInitCap.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFInitCap.java @@ -25,10 +25,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFInitCap extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFInitCap. 
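greatest() and least() are variadic, so the tests above size an ObjectInspector array to the arity before calling initialize. A sketch over three INT arguments, assuming the resolved common type makes evaluate return an IntWritable here:

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFGreatest;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.IntWritable;
    import org.junit.Test;

    public class GreatestVarargsSketch {
      @Test
      public void testGreatestOfThreeInts() throws HiveException {
        GenericUDFGreatest udf = new GenericUDFGreatest();
        // One inspector per argument; the array length fixes the arity.
        ObjectInspector[] argOIs = new ObjectInspector[3];
        for (int i = 0; i < argOIs.length; i++) {
          argOIs[i] = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        }
        udf.initialize(argOIs);
        DeferredObject[] args = {
            new DeferredJavaObject(new IntWritable(7)),
            new DeferredJavaObject(new IntWritable(42)),
            new DeferredJavaObject(new IntWritable(-3)) };
        IntWritable output = (IntWritable) udf.evaluate(args);
        assertEquals(42, output.get());
      }
    }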
+ */ +public class TestGenericUDFInitCap { + + @Test public void testInitCap() throws HiveException { GenericUDFInitCap udf = new GenericUDFInitCap(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java index 747f12c94c..207788b19f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLTrim.java @@ -26,10 +26,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFLTrim extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFLTrim. + */ +public class TestGenericUDFLTrim { + + @Test public void testTrim() throws HiveException { GenericUDFLTrim udf = new GenericUDFLTrim(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java index 972ab35311..3cbcbdb7cc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java @@ -26,10 +26,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFLastDay extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFLastDay. 
+ */ +public class TestGenericUDFLastDay { + + @Test public void testLastDay() throws HiveException { GenericUDFLastDay udf = new GenericUDFLastDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -63,6 +69,7 @@ public void testLastDay() throws HiveException { runAndVerifyTs("1966-01-31 23:59:59", "1966-01-31", udf); } + @Test public void testWrongDateStr() throws HiveException { GenericUDFLastDay udf = new GenericUDFLastDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -76,6 +83,7 @@ public void testWrongDateStr() throws HiveException { runAndVerify(null, null, udf); } + @Test public void testWrongTsStr() throws HiveException { GenericUDFLastDay udf = new GenericUDFLastDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -89,6 +97,7 @@ public void testWrongTsStr() throws HiveException { runAndVerify("2016-02-28T10:30:45", null, udf); } + @Test public void testLastDayTs() throws HiveException { GenericUDFLastDay udf = new GenericUDFLastDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java index cccc70e0dc..fc2d9747c2 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -33,9 +33,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFLeast extends TestCase { +/** + * TestGenericUDFLeast. 
+ */ +public class TestGenericUDFLeast { + @Test public void testOneArg() throws HiveException { @SuppressWarnings("resource") GenericUDFLeast udf = new GenericUDFLeast(); @@ -51,6 +58,7 @@ public void testOneArg() throws HiveException { assertNotNull("least() test ", ex); } + @Test public void testVoids() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableVoidObjectInspector; @@ -61,6 +69,7 @@ public void testVoids() throws HiveException { runAndVerify(new Object[] { null, 1, "test"}, null, udf); } + @Test public void testLeastTypes() throws HiveException { GenericUDFGreatest udf = new GenericUDFGreatest(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector; @@ -72,6 +81,7 @@ public void testLeastTypes() throws HiveException { runAndVerify(new Object[] { 1, 11.1, Date.valueOf("2015-03-20"), "test"}, "test", udf); //string comparisons } + @Test public void testLeastStr() throws HiveException { GenericUDFLeast udf = new GenericUDFLeast(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -100,6 +110,7 @@ public void testLeastStr() throws HiveException { runAndVerify(new String[] { null, null, null }, null, udf); } + @Test public void testLeastInt() throws HiveException { GenericUDFLeast udf = new GenericUDFLeast(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -122,6 +133,7 @@ public void testLeastInt() throws HiveException { runAndVerify(new Integer[] { null, null, null }, null, udf); } + @Test public void testLeastDouble() throws HiveException { GenericUDFLeast udf = new GenericUDFLeast(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -144,6 +156,7 @@ public void testLeastDouble() throws HiveException { runAndVerify(new Double[] { null, null, null }, null, udf); } + @Test public void testLeastDate() throws HiveException { GenericUDFLeast udf = new GenericUDFLeast(); ObjectInspector[] arguments = new ObjectInspector[3]; @@ -166,6 +179,7 @@ public void testLeastDate() throws HiveException { runAndVerify(new Date[] { null, null, null }, null, udf); } + @Test public void testLeastIntTypes() throws HiveException { GenericUDFLeast udf = new GenericUDFLeast(); ObjectInspector[] arguments = new ObjectInspector[4]; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java index 5e43386368..79d4694dc8 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -28,9 +28,18 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import org.junit.Test; -public class TestGenericUDFLevenshtein extends TestCase { +/** + * TestGenericUDFLevenshtein. 
+ */ +public class TestGenericUDFLevenshtein { + @Test public void testLevenshtein() throws HiveException { GenericUDFLevenshtein udf = new GenericUDFLevenshtein(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -50,6 +59,7 @@ public void testLevenshtein() throws HiveException { runAndVerify(null, null, null, udf); } + @Test public void testLevenshteinWrongType0() throws HiveException { @SuppressWarnings("resource") GenericUDFLevenshtein udf = new GenericUDFLevenshtein(); @@ -66,6 +76,7 @@ public void testLevenshteinWrongType0() throws HiveException { } } + @Test public void testLevenshteinWrongType1() throws HiveException { @SuppressWarnings("resource") GenericUDFLevenshtein udf = new GenericUDFLevenshtein(); @@ -82,6 +93,7 @@ public void testLevenshteinWrongType1() throws HiveException { } } + @Test public void testLevenshteinWrongLength() throws HiveException { @SuppressWarnings("resource") GenericUDFLevenshtein udf = new GenericUDFLevenshtein(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java index 52c7ad5b06..ad8f3e2db3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLpad.java @@ -27,10 +27,17 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFLpad extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; +/** + * TestGenericUDFLpad. + */ +public class TestGenericUDFLpad { + + @Test public void testLpad() throws HiveException { GenericUDFLpad udf = new GenericUDFLpad(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java index e9f32a1397..85e9057c93 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java @@ -31,10 +31,18 @@ import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFMonthsBetween extends TestCase { +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFMonthsBetween. 
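For reference, the distance the Levenshtein tests encode is the classic single-character edit count; kitten to sitting takes three edits. A runnable sketch:

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLevenshtein;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.junit.Test;

    public class LevenshteinSketch {
      @Test
      public void testKittenToSitting() throws HiveException {
        GenericUDFLevenshtein udf = new GenericUDFLevenshtein();
        ObjectInspector strOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        udf.initialize(new ObjectInspector[] { strOI, strOI });
        DeferredObject[] args = { new DeferredJavaObject(new Text("kitten")),
            new DeferredJavaObject(new Text("sitting")) };
        // substitute k->s, substitute e->i, insert g: three edits
        assertEquals(3, ((IntWritable) udf.evaluate(args)).get());
      }
    }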
+ */ +public class TestGenericUDFMonthsBetween { + + @Test public void testMonthsBetweenForString() throws HiveException { // Default run GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween(); @@ -43,7 +51,7 @@ public void testMonthsBetweenForString() throws HiveException { ObjectInspector[] arguments = { valueOI1, valueOI2 }; udf.initialize(arguments); - testMonthsBetweenForString(udf); + testMonthsBetweenForStringM(udf); // Run without round-off GenericUDFMonthsBetween udfWithoutRoundOff = new GenericUDFMonthsBetween(); @@ -55,9 +63,10 @@ ObjectInspector[] args = { vOI1, vOI2, vOI3 }; udfWithoutRoundOff.initialize(args); - testMonthsBetweenForString(udf); + testMonthsBetweenForStringM(udfWithoutRoundOff); } + @Test public void testWrongDateStr() throws HiveException { GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -69,7 +78,7 @@ public void testWrongDateStr() throws HiveException { runTestStr("2002-03-24", "2002-02", null, udf); } - public void testMonthsBetweenForString(GenericUDFMonthsBetween udf) throws HiveException { + public void testMonthsBetweenForStringM(GenericUDFMonthsBetween udf) throws HiveException { // test month diff with fraction considering time components runTestStr("1995-02-02", "1995-01-01", 1.03225806, udf); runTestStr("2003-07-17", "2005-07-06", -23.64516129, udf); @@ -126,6 +135,7 @@ public void testMonthsBetweenForString(GenericUDFMonthsBetween udf) throws HiveE + @Test public void testMonthsBetweenForTimestamp() throws HiveException { GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -133,7 +143,7 @@ public void testMonthsBetweenForTimestamp() throws HiveException { ObjectInspector[] arguments = { valueOI1, valueOI2 }; udf.initialize(arguments); - testMonthsBetweenForTimestamp(udf); + testMonthsBetweenForTimestampM(udf); // Run without round-off GenericUDFMonthsBetween udfWithoutRoundOff = new GenericUDFMonthsBetween(); @@ -145,10 +155,10 @@ ObjectInspector[] args = { vOI1, vOI2, vOI3 }; udfWithoutRoundOff.initialize(args); - testMonthsBetweenForTimestamp(udfWithoutRoundOff); + testMonthsBetweenForTimestampM(udfWithoutRoundOff); } - public void testMonthsBetweenForTimestamp(GenericUDFMonthsBetween udf) throws HiveException { + public void testMonthsBetweenForTimestampM(GenericUDFMonthsBetween udf) throws HiveException { // test month diff with fraction considering time components runTestTs("1995-02-02 00:00:00", "1995-01-01 00:00:00", 1.03225806, udf); runTestTs("2003-07-17 00:00:00", "2005-07-06 00:00:00", -23.64516129, udf); @@ -183,6 +193,7 @@ public void testMonthsBetweenForTimestamp(GenericUDFMonthsBetween udf) throws Hi runTestTs("2003-04-23 23:59:59", "2003-03-24 00:00:00", 0.99999963, udf); } + @Test public void testMonthsBetweenForDate() throws HiveException { GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -190,7 +201,7 @@ public void testMonthsBetweenForDate() throws HiveException { ObjectInspector[] arguments = { valueOI1, valueOI2 }; udf.initialize(arguments); - testMonthsBetweenForDate(udf); + testMonthsBetweenForDateM(udf); // Run without round-off GenericUDFMonthsBetween udfWithoutRoundOff = new
GenericUDFMonthsBetween(); @@ -202,10 +213,10 @@ public void testMonthsBetweenForDate() throws HiveException { ObjectInspector[] args = { vOI1, vOI2, vOI3 }; udfWithoutRoundOff.initialize(args); - testMonthsBetweenForDate(udfWithoutRoundOff); + testMonthsBetweenForDateM(udfWithoutRoundOff); } - public void testMonthsBetweenForDate(GenericUDFMonthsBetween udf) throws HiveException { + public void testMonthsBetweenForDateM(GenericUDFMonthsBetween udf) throws HiveException { // test month diff with fraction considering time components runTestDt("1995-02-02", "1995-01-01", 1.03225806, udf); runTestDt("2003-07-17", "2005-07-06", -23.64516129, udf); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java index c211fddf76..4acfe612e4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -26,9 +26,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFNextDay extends TestCase { +/** + * TestGenericUDFNextDay. + */ +public class TestGenericUDFNextDay { + @Test public void testNextDay() throws HiveException { GenericUDFNextDay udf = new GenericUDFNextDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -70,6 +77,7 @@ public void testNextDay() throws HiveException { runAndVerify(null, null, null, udf); } + @Test public void testNotValidValues() throws Exception { GenericUDFNextDay udf = new GenericUDFNextDay(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -88,6 +96,7 @@ public void testNotValidValues() throws Exception { runAndVerify("2015-01-14T14:04:34", "SAT", null, udf); } + @Test public void testNextDayErrorArg1() throws HiveException { @SuppressWarnings("resource") GenericUDFNextDay udf = new GenericUDFNextDay(); @@ -105,6 +114,7 @@ public void testNextDayErrorArg1() throws HiveException { } } + @Test public void testNextDayErrorArg2() throws HiveException { @SuppressWarnings("resource") GenericUDFNextDay udf = new GenericUDFNextDay(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java index 5e6c14e2b9..0dcc41aeba 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Timestamp; @@ -30,9 +30,17 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; 
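On the M-suffixed renames in TestGenericUDFMonthsBetween above: once discovery is annotation-driven, a parameterized helper that shares a test method's name is never run as a test, but the overload makes it easy to annotate or call the wrong one. Renaming the helpers keeps the parameterless @Test entry points unambiguous. The shape in miniature (names hypothetical):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class HelperNamingSketch {
      @Test
      public void testDoubling() {
        // Parameterless and annotated: the only method JUnit 4 will run.
        testDoublingM(21);
      }

      // Overloaded helper, renamed with an "M" suffix so it cannot be
      // mistaken for a test entry point.
      private void testDoublingM(int half) {
        assertEquals(42, 2 * half);
      }
    }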
+import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFQuarter extends TestCase { +/** + * TestGenericUDFQuarter. + */ +public class TestGenericUDFQuarter { + @Test public void testQuarterStr() throws HiveException { GenericUDFQuarter udf = new GenericUDFQuarter(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -81,6 +89,7 @@ public void testQuarterStr() throws HiveException { runAndVerifyStr("1966-12-31 23:59:59.999", 4, udf); } + @Test public void testWrongDateStr() throws HiveException { GenericUDFQuarter udf = new GenericUDFQuarter(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -94,6 +103,7 @@ public void testWrongDateStr() throws HiveException { runAndVerifyStr(null, null, udf); } + @Test public void testQuarterDt() throws HiveException { GenericUDFQuarter udf = new GenericUDFQuarter(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -120,6 +130,7 @@ public void testQuarterDt() throws HiveException { runAndVerifyDt("1966-12-31", 4, udf); } + @Test public void testQuarterTs() throws HiveException { GenericUDFQuarter udf = new GenericUDFQuarter(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java index 56356e4cad..84eae07621 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRTrim.java @@ -26,10 +26,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFRTrim extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFRTrim. + */ +public class TestGenericUDFRTrim { + + @Test public void testTrim() throws HiveException { GenericUDFRTrim udf = new GenericUDFRTrim(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRegexp.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRegexp.java index 1cc0fa6c1a..d6abd20b24 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRegexp.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRegexp.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -27,9 +27,17 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFRegexp extends TestCase { +/** + * TestGenericUDFRegexp. 
+ */ +public class TestGenericUDFRegexp { + @Test public void testConstant() throws HiveException { GenericUDFRegExp udf = new GenericUDFRegExp(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -48,6 +56,7 @@ public void testConstant() throws HiveException { runAndVerifyConst(null, regexText, null, udf); } + @Test public void testEmptyConstant() throws HiveException { GenericUDFRegExp udf = new GenericUDFRegExp(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -65,6 +74,7 @@ public void testEmptyConstant() throws HiveException { runAndVerifyConst(null, regexText, null, udf); } + @Test public void testNullConstant() throws HiveException { GenericUDFRegExp udf = new GenericUDFRegExp(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -81,6 +91,7 @@ public void testNullConstant() throws HiveException { runAndVerifyConst(null, regexText, null, udf); } + @Test public void testNonConstant() throws HiveException { GenericUDFRegExp udf = new GenericUDFRegExp(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFReplace.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFReplace.java index 2cf05b3e0c..df0056cb95 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFReplace.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFReplace.java @@ -17,13 +17,22 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.UDFReplace; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFReplace extends TestCase { +/** + * TestGenericUDFReplace. + * + */ +public class TestGenericUDFReplace { + @Test public void testReplace() throws HiveException { UDFReplace udf = new UDFReplace(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java index 967f798ef6..0007a8172d 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFRpad.java @@ -25,10 +25,17 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFRpad extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; +/** + * TestGenericUDFRpad. 
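testConstant above passes the pattern through a constant ObjectInspector, which is what lets GenericUDFRegExp compile the regex once during initialize rather than per row. A sketch of that wiring, with pattern and input chosen for illustration:

    import static org.junit.Assert.assertTrue;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRegExp;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.io.BooleanWritable;
    import org.apache.hadoop.io.Text;
    import org.junit.Test;

    public class ConstantPatternSketch {
      @Test
      public void testConstantPattern() throws HiveException {
        GenericUDFRegExp udf = new GenericUDFRegExp();
        Text pattern = new Text("^fo");
        ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        // Constant OI: the pattern is known at plan time, so it can be precompiled.
        ObjectInspector patternOI = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, pattern);
        udf.initialize(new ObjectInspector[] { valueOI, patternOI });
        DeferredObject[] args = { new DeferredJavaObject(new Text("fofo")),
            new DeferredJavaObject(pattern) };
        assertTrue(((BooleanWritable) udf.evaluate(args)).get());
      }
    }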
+ */ +public class TestGenericUDFRpad { + + @Test public void testRpad() throws HiveException { GenericUDFRpad udf = new GenericUDFRpad(); ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSha2.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSha2.java index 777dfa96f6..66f2abc086 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSha2.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSha2.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -28,9 +28,15 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFSha2 extends TestCase { +/** + * TestGenericUDFSha2. + */ +public class TestGenericUDFSha2 { + @Test public void testSha0Str() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -49,6 +55,7 @@ public void testSha0Str() throws HiveException { runAndVerifyStr(null, lenWr, null, udf); } + @Test public void testSha0Bin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; @@ -67,6 +74,7 @@ public void testSha0Bin() throws HiveException { runAndVerifyBin(null, lenWr, null, udf); } + @Test public void testSha200Str() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -80,6 +88,7 @@ public void testSha200Str() throws HiveException { runAndVerifyStr("ABC", lenWr, null, udf); } + @Test public void testSha200Bin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; @@ -93,6 +102,7 @@ public void testSha200Bin() throws HiveException { runAndVerifyBin(new byte[] { 65, 66, 67 }, lenWr, null, udf); } + @Test public void testSha256Str() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -111,6 +121,7 @@ public void testSha256Str() throws HiveException { runAndVerifyStr(null, lenWr, null, udf); } + @Test public void testSha256Bin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; @@ -129,6 +140,7 @@ public void testSha256Bin() throws HiveException { runAndVerifyBin(null, lenWr, null, udf); } + @Test public void testSha384Str() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -153,6 +165,7 @@ public void testSha384Str() throws HiveException { runAndVerifyStr(null, lenWr, null, udf); } + @Test public void testSha384Bin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; @@ -177,6 +190,7 @@ public void testSha384Bin() throws HiveException 
{ runAndVerifyBin(null, lenWr, null, udf); } + @Test public void testSha512Str() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -201,6 +215,7 @@ public void testSha512Str() throws HiveException { runAndVerifyStr(null, lenWr, null, udf); } + @Test public void testSha512Bin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; @@ -225,6 +240,7 @@ public void testSha512Bin() throws HiveException { runAndVerifyBin(null, lenWr, null, udf); } + @Test public void testShaNullStr() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -238,6 +254,7 @@ public void testShaNullStr() throws HiveException { runAndVerifyStr("ABC", lenWr, null, udf); } + @Test public void testShaNullBin() throws HiveException { GenericUDFSha2 udf = new GenericUDFSha2(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSoundex.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSoundex.java index e35f2d85bb..c6a4f703df 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSoundex.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSoundex.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -27,9 +27,18 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import org.junit.Test; -public class TestGenericUDFSoundex extends TestCase { +/** + * TestGenericUDFSoundex. 
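As the fixtures above show, sha2 accepts bit lengths 0 (treated as 256), 256, 384, and 512, and yields null for unsupported lengths such as 200 or for a null length. A dependency-free sketch of the digest shape behind the 256-bit fixtures:

    import static org.junit.Assert.assertEquals;

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    import org.junit.Test;

    public class Sha2ShapeSketch {
      @Test
      public void testSha256DigestShape() throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        byte[] digest = md.digest("ABC".getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
          hex.append(String.format("%02x", b));
        }
        // A 256-bit digest renders as 64 hex characters, the shape sha2(str, 256) returns.
        assertEquals(64, hex.length());
      }
    }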
+ */ +public class TestGenericUDFSoundex { + @Test public void testSoundex() throws HiveException { GenericUDFSoundex udf = new GenericUDFSoundex(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -53,6 +62,7 @@ public void testSoundex() throws HiveException { runAndVerify("\u3500\u3501\u3502\u3503", null, udf); } + @Test public void testSoundexWrongType0() throws HiveException { @SuppressWarnings("resource") GenericUDFSoundex udf = new GenericUDFSoundex(); @@ -68,6 +78,7 @@ public void testSoundexWrongType0() throws HiveException { } } + @Test public void testSoundexWrongLength() throws HiveException { @SuppressWarnings("resource") GenericUDFSoundex udf = new GenericUDFSoundex(); diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSubstringIndex.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSubstringIndex.java index 22ee3d1119..31b22829cc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSubstringIndex.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSubstringIndex.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import junit.framework.TestCase; + import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -27,9 +27,15 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestGenericUDFSubstringIndex extends TestCase { +/** + * TestGenericUDFSubstringIndex. + */ +public class TestGenericUDFSubstringIndex { + @Test public void testSubstringIndex() throws HiveException { GenericUDFSubstringIndex udf = new GenericUDFSubstringIndex(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; @@ -62,6 +68,7 @@ public void testSubstringIndex() throws HiveException { runAndVerify("www.apache.org", ".", null, null, udf); } + @Test public void testSubstringIndexConst() throws HiveException { GenericUDFSubstringIndex udf = new GenericUDFSubstringIndex(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java index 61623d54c9..45b60dbd40 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java @@ -30,9 +30,15 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFToUnixTimestamp extends TestCase { +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import org.junit.Test; + +/** + * TestGenericUDFToUnixTimestamp. 
+ */ +public class TestGenericUDFToUnixTimestamp { public static void runAndVerify(GenericUDFToUnixTimeStamp udf, Object arg, Object expected) throws HiveException { @@ -56,6 +62,7 @@ public static void runAndVerify(GenericUDFToUnixTimeStamp udf, } } + @Test public void testTimestamp() throws HiveException { GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -76,6 +83,7 @@ public void testTimestamp() throws HiveException { runAndVerify(udf, null, null); } + @Test public void testDate() throws HiveException { GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -91,6 +99,7 @@ public void testDate() throws HiveException { runAndVerify(udf, null, null); } + @Test public void testString() throws HiveException { GenericUDFToUnixTimeStamp udf1 = new GenericUDFToUnixTimeStamp(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java index 7d6ecb29d3..414425f0f9 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrim.java @@ -26,10 +26,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFTrim extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFTrim. + */ +public class TestGenericUDFTrim { + + @Test public void testTrim() throws HiveException { GenericUDFTrim udf = new GenericUDFTrim(); ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector; diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java index 86511a68a8..f82420e85c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java @@ -29,10 +29,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestGenericUDFTrunc extends TestCase { +import static org.junit.Assert.assertEquals; +import org.junit.Test; +/** + * TestGenericUDFTrunc. 
+ */ +public class TestGenericUDFTrunc { + + @Test public void testStringToDateWithMonthFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -111,6 +117,7 @@ public void testStringToDateWithMonthFormat() throws HiveException { runAndVerify("2016-02-01", udf, initArgs, evalArgs); } + @Test public void testStringToDateWithQuarterFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -213,6 +220,7 @@ public void testStringToDateWithQuarterFormat() throws HiveException { runAndVerify("2016-10-01", udf, initArgs, evalArgs); } + @Test public void testStringToDateWithYearFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.javaStringObjectInspector; @@ -291,6 +299,7 @@ public void testStringToDateWithYearFormat() throws HiveException { runAndVerify("2016-01-01", udf, initArgs, evalArgs); } + @Test public void testTimestampToDateWithMonthFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -385,6 +394,7 @@ public void testTimestampToDateWithMonthFormat() throws HiveException { runAndVerify("2016-02-01", udf, initArgs, evalArgs); } + @Test public void testTimestampToDateWithQuarterFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -509,6 +519,7 @@ public void testTimestampToDateWithQuarterFormat() throws HiveException { runAndVerify("2016-10-01", udf, initArgs, evalArgs); } + @Test public void testTimestampToDateWithYearFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; @@ -603,6 +614,7 @@ public void testTimestampToDateWithYearFormat() throws HiveException { runAndVerify("2016-01-01", udf, initArgs, evalArgs); } + @Test public void testDateWritableToDateWithMonthFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -648,6 +660,7 @@ public void testDateWritableToDateWithMonthFormat() throws HiveException { runAndVerify("2016-02-01", udf, initArgs, evalArgs); } + @Test public void testDateWritableToDateWithQuarterFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; @@ -705,6 +718,7 @@ public void testDateWritableToDateWithQuarterFormat() throws HiveException { runAndVerify("2016-10-01", udf, initArgs, evalArgs); } + @Test public void testDateWritableToDateWithYearFormat() throws HiveException { GenericUDFTrunc udf = new GenericUDFTrunc(); ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector; diff --git serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java index 66b810a93b..be2d09bad3 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java +++ serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java @@ -23,7 +23,7 @@ import java.util.Properties; import java.util.Random; -import 
junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -45,17 +45,20 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import org.junit.Test; -public class TestStatsSerde extends TestCase { - - public TestStatsSerde(String name) { - super(name); - } +/** + * StatsSerde Test. + */ +public class TestStatsSerde { /** * Test LazySimpleSerDe */ + @Test public void testLazySimpleSerDe() throws Throwable { try { // Create the SerDe @@ -100,6 +103,7 @@ private void deserializeAndSerializeLazySimple(LazySimpleSerDe serDe, Text t) * Test LazyBinarySerDe */ + @Test public void testLazyBinarySerDe() throws Throwable { try { System.out.println("test: testLazyBinarySerDe"); @@ -165,6 +169,7 @@ private void deserializeAndSerializeLazyBinary(AbstractSerDe serDe, Object[] row * Test ColumnarSerDe */ + @Test public void testColumnarSerDe() throws Throwable { try { System.out.println("test: testColumnarSerde"); diff --git serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java index 9d72a1a07e..177944cef9 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java +++ serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java @@ -20,7 +20,7 @@ import java.util.Properties; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; @@ -32,16 +32,22 @@ import org.apache.thrift.transport.TMemoryBuffer; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.junit.Test; /** * TestTCTLSeparatedProtocol. * */ -public class TestTCTLSeparatedProtocol extends TestCase { +public class TestTCTLSeparatedProtocol { public TestTCTLSeparatedProtocol() throws Exception { } + @Test public void testReads() throws Exception { TMemoryBuffer trans = new TMemoryBuffer(1024); String foo = "Hello"; @@ -114,6 +120,7 @@ public void testReads() throws Exception { prot.readStructEnd(); } + @Test public void testWrites() throws Exception { TMemoryBuffer trans = new TMemoryBuffer(1024); TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 1024); @@ -236,6 +243,7 @@ public void testWrites() throws Exception { prot.readStructEnd(); } + @Test public void testQuotedWrites() throws Exception { TMemoryBuffer trans = new TMemoryBuffer(4096); TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 4096); @@ -308,6 +316,7 @@ public void testQuotedWrites() throws Exception { * with a more TRegexLike protocol, but for this case, TCTLSeparatedProtocol * can do it. 
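Every file in this patch repeats the same mechanical conversion, so it is worth sketching once. A minimal before/after illustration, using a hypothetical TestFoo rather than any class from this patch:

    // JUnit 3 form being removed throughout: tests were discovered by reflection
    // over public void no-arg methods named test*, and assertions were inherited
    // from junit.framework.TestCase.
    //
    //   public class TestFoo extends TestCase {
    //     public void testSomething() { assertEquals(4, 2 + 2); }
    //   }
    //
    // JUnit 4 form being introduced: no base class, an explicit @Test annotation
    // per test method, statically imported org.junit.Assert methods, and a
    // class-level javadoc (presumably to satisfy checkstyle on touched files).
    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    /**
     * TestFoo.
     */
    public class TestFoo {

      @Test
      public void testSomething() {
        assertEquals(4, 2 + 2);
      }
    }

Methods that JUnit 3 ran purely by naming convention must now carry @Test explicitly, which is why every hunk above and below adds the annotation immediately before an existing test method.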
*/ + @Test public void test1ApacheLogFormat() throws Exception { final String sample = "127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326"; @@ -382,6 +391,7 @@ public void test1ApacheLogFormat() throws Exception { prot.readStructEnd(); } + @Test public void testNulls() throws Exception { TMemoryBuffer trans = new TMemoryBuffer(1024); TCTLSeparatedProtocol prot = new TCTLSeparatedProtocol(trans, 10); @@ -476,6 +486,7 @@ public void testNulls() throws Exception { assertTrue(ret1 == 0); } + @Test public void testShouldThrowRunTimeExceptionIfUnableToInitializeTokenizer() throws Exception { TCTLSeparatedProtocol separatedProtocol = new TCTLSeparatedProtocol(new TTransport() { @Override diff --git serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java index 62741d38d0..7cda197ddd 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java +++ serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java @@ -26,6 +26,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.serde2.ByteStream.Output; +import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerdeRandomRowSource; import org.apache.hadoop.hive.serde2.VerifyFast; @@ -39,15 +40,21 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BytesWritable; -import junit.framework.TestCase; + import org.junit.Assert; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestBinarySortableFast extends TestCase { +/** + * BinarySortableFast Test. + */ +public class TestBinarySortableFast { private static String debugDetailedReadPositionString; private static StackTraceElement[] debugStackTrace; - private void testBinarySortableFast( + private void testBinarySortableFastM( SerdeRandomRowSource source, Object[][] rows, boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker, AbstractSerDe serde, StructObjectInspector rowOI, @@ -78,44 +85,34 @@ private void testBinarySortableFast( // One Writable per row. BytesWritable serializeWriteBytes[] = new BytesWritable[rowCount]; - int[][] perFieldWriteLengthsArray = new int[rowCount][]; - for (int i = 0; i < rowCount; i++) { - Object[] row = rows[i]; - Output output = new Output(); - binarySortableSerializeWrite.set(output); + serializeWrite(rows, ascending, typeInfos, rowCount, columnCount, writeColumnCount, binarySortableSerializeWrite, + serializeWriteBytes, perFieldWriteLengthsArray); - int[] perFieldWriteLengths = new int[columnCount]; - for (int index = 0; index < writeColumnCount; index++) { - VerifyFast.serializeWrite(binarySortableSerializeWrite, typeInfos[index], row[index]); - perFieldWriteLengths[index] = output.getLength(); - } - perFieldWriteLengthsArray[i] = perFieldWriteLengths; - BytesWritable bytesWritable = new BytesWritable(); - bytesWritable.set(output.getData(), 0, output.getLength()); - serializeWriteBytes[i] = bytesWritable; - if (i > 0) { - BytesWritable previousBytesWritable = serializeWriteBytes[i - 1]; - int compareResult = previousBytesWritable.compareTo(bytesWritable); - if ((compareResult < 0 && !ascending) - || (compareResult > 0 && ascending)) { - System.out.println("Test failed in " - + (ascending ? 
"ascending" : "descending") + " order with " - + (i - 1) + " and " + i); - System.out.println("serialized data [" + (i - 1) + "] = " - + TestBinarySortableSerDe.hexString(serializeWriteBytes[i - 1])); - System.out.println("serialized data [" + i + "] = " - + TestBinarySortableSerDe.hexString(serializeWriteBytes[i])); - fail("Sort order of serialized " + (i - 1) + " and " + i - + " are reversed!"); - } - } - } + // Try to deserialize using DeserializeRead our Writable row objects created by SerializeWrite. + deserializeRead(rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, typeInfos, useIncludeColumns, + rowCount, columnCount, columnsToInclude, writeColumnCount, serializeWriteBytes); + + // Try to deserialize using SerDe class our Writable row objects created by SerializeWrite. + deserializeSerDe(rows, serde, serde_fewer, doWriteFewerColumns, rowCount, writeColumnCount, serializeWriteBytes); + // One Writable per row. + BytesWritable serdeBytes[] = new BytesWritable[rowCount]; - // Try to deserialize using DeserializeRead our Writable row objects created by SerializeWrite. - for (int i = 0; i < rowCount; i++) { + // Serialize using the SerDe, then below deserialize using DeserializeRead. + serializeSerDe(rows, serde, rowOI, serde_fewer, typeInfos, doWriteFewerColumns, rowCount, serializeWriteBytes, + perFieldWriteLengthsArray, serdeBytes); + + // Try to deserialize using DeserializeRead our Writable row objects created by SerDe. + deserializeReadSerDe(rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, typeInfos, + useIncludeColumns, rowCount, columnCount, columnsToInclude, writeColumnCount, serdeBytes); + } + +private void deserializeReadSerDe(Object[][] rows, boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, + byte[] columnNotNullMarker, TypeInfo[] typeInfos, boolean useIncludeColumns, int rowCount, int columnCount, + boolean[] columnsToInclude, int writeColumnCount, BytesWritable[] serdeBytes) throws IOException { + for (int i = 0; i < rowCount; i++) { Object[] row = rows[i]; BinarySortableDeserializeRead binarySortableDeserializeRead = new BinarySortableDeserializeRead( @@ -125,100 +122,31 @@ private void testBinarySortableFast( columnNullMarker, columnNotNullMarker); - BytesWritable bytesWritable = serializeWriteBytes[i]; - binarySortableDeserializeRead.set( - bytesWritable.getBytes(), 0, bytesWritable.getLength()); + + BytesWritable bytesWritable = serdeBytes[i]; + binarySortableDeserializeRead.set(bytesWritable.getBytes(), 0, bytesWritable.getLength()); for (int index = 0; index < columnCount; index++) { if (useIncludeColumns && !columnsToInclude[index]) { binarySortableDeserializeRead.skipNextField(); } else if (index >= writeColumnCount) { // Should come back a null. - VerifyFast.verifyDeserializeRead(binarySortableDeserializeRead, typeInfos[index], null); + verifyRead(binarySortableDeserializeRead, typeInfos[index], null); } else { verifyRead(binarySortableDeserializeRead, typeInfos[index], row[index]); } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(binarySortableDeserializeRead.isEndOfInputReached()); - } - - /* - * Clip off one byte and expect to get an EOFException on the write field. 
- */ - BinarySortableDeserializeRead binarySortableDeserializeRead2 = - new BinarySortableDeserializeRead( - typeInfos, - /* useExternalBuffer */ false, - columnSortOrderIsDesc, - columnNullMarker, - columnNotNullMarker); - - binarySortableDeserializeRead2.set( - bytesWritable.getBytes(), 0, bytesWritable.getLength() - 1); // One fewer byte. - - for (int index = 0; index < writeColumnCount; index++) { - if (index == writeColumnCount - 1) { - boolean threw = false; - try { - verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]); - } catch (EOFException e) { -// debugDetailedReadPositionString = binarySortableDeserializeRead2.getDetailedReadPositionString(); -// debugStackTrace = e.getStackTrace(); - threw = true; - } - - if (!threw && row[index] != null) { - Assert.fail(); - } - } else { - if (useIncludeColumns && !columnsToInclude[index]) { - binarySortableDeserializeRead2.skipNextField(); - } else { - verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]); - } - } - } - - } - - // Try to deserialize using SerDe class our Writable row objects created by SerializeWrite. - for (int i = 0; i < rowCount; i++) { - BytesWritable bytesWritable = serializeWriteBytes[i]; - - // Note that regular SerDe doesn't tolerate fewer columns. - List deserializedRow; - if (doWriteFewerColumns) { - deserializedRow = (List) serde_fewer.deserialize(bytesWritable); - } else { - deserializedRow = (List) serde.deserialize(bytesWritable); - } - - Object[] row = rows[i]; - for (int index = 0; index < writeColumnCount; index++) { - Object expected = row[index]; - Object object = deserializedRow.get(index); - if (expected == null || object == null) { - if (expected != null || object != null) { - fail("SerDe deserialized NULL column mismatch"); - } - } else { - if (!object.equals(expected)) { - fail("SerDe deserialized value does not match (expected " + - expected.getClass().getName() + " " + - expected.toString() + ", actual " + - object.getClass().getName() + " " + - object.toString() + ")"); - } - } + assertTrue(binarySortableDeserializeRead.isEndOfInputReached()); } } +} - // One Writable per row. - BytesWritable serdeBytes[] = new BytesWritable[rowCount]; - - // Serialize using the SerDe, then below deserialize using DeserializeRead. - for (int i = 0; i < rowCount; i++) { +private void serializeSerDe(Object[][] rows, AbstractSerDe serde, StructObjectInspector rowOI, + AbstractSerDe serde_fewer, TypeInfo[] typeInfos, boolean doWriteFewerColumns, int rowCount, + BytesWritable[] serializeWriteBytes, int[][] perFieldWriteLengthsArray, BytesWritable[] serdeBytes) + throws SerDeException { + for (int i = 0; i < rowCount; i++) { Object[] row = rows[i]; // Since SerDe reuses memory, we will need to make a copy. @@ -279,9 +207,47 @@ private void testBinarySortableFast( } serdeBytes[i] = bytesWritable; } +} + +private void deserializeSerDe(Object[][] rows, AbstractSerDe serde, AbstractSerDe serde_fewer, + boolean doWriteFewerColumns, int rowCount, int writeColumnCount, BytesWritable[] serializeWriteBytes) + throws SerDeException { + for (int i = 0; i < rowCount; i++) { + BytesWritable bytesWritable = serializeWriteBytes[i]; + + // Note that regular SerDe doesn't tolerate fewer columns. + List deserializedRow; + if (doWriteFewerColumns) { + deserializedRow = (List) serde_fewer.deserialize(bytesWritable); + } else { + deserializedRow = (List) serde.deserialize(bytesWritable); + } - // Try to deserialize using DeserializeRead our Writable row objects created by SerDe. 
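The restructuring in this file is, net, an extract-method refactor: the old monolithic driver body is split into five helpers that are invoked in the same order in which the inlined blocks used to run. A comment-only sketch of the call shape (the full signatures are in the surrounding hunks; the parameters are simply the driver's locals threaded through):

    // testBinarySortableFastM
    //   1. serializeWrite(...)        SerializeWrite each row; checks that the
    //                                 serialized byte order matches the row order
    //   2. deserializeRead(...)       DeserializeRead the SerializeWrite bytes,
    //                                 including the clipped-last-byte EOF check
    //   3. deserializeSerDe(...)      SerDe.deserialize the same bytes and compare
    //                                 against the original row objects
    //   4. serializeSerDe(...)        SerDe.serialize each row, reusing the per-field
    //                                 lengths recorded during SerializeWrite
    //   5. deserializeReadSerDe(...)  DeserializeRead the SerDe-produced bytes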
- for (int i = 0; i < rowCount; i++) { + Object[] row = rows[i]; + for (int index = 0; index < writeColumnCount; index++) { + Object expected = row[index]; + Object object = deserializedRow.get(index); + if (expected == null || object == null) { + if (expected != null || object != null) { + fail("SerDe deserialized NULL column mismatch"); + } + } else { + if (!object.equals(expected)) { + fail("SerDe deserialized value does not match (expected " + + expected.getClass().getName() + " " + + expected.toString() + ", actual " + + object.getClass().getName() + " " + + object.toString() + ")"); + } + } + } + } +} + +private void deserializeRead(Object[][] rows, boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, + byte[] columnNotNullMarker, TypeInfo[] typeInfos, boolean useIncludeColumns, int rowCount, int columnCount, + boolean[] columnsToInclude, int writeColumnCount, BytesWritable[] serializeWriteBytes) throws IOException { + for (int i = 0; i < rowCount; i++) { Object[] row = rows[i]; BinarySortableDeserializeRead binarySortableDeserializeRead = new BinarySortableDeserializeRead( @@ -291,25 +257,100 @@ private void testBinarySortableFast( columnNullMarker, columnNotNullMarker); - - BytesWritable bytesWritable = serdeBytes[i]; - binarySortableDeserializeRead.set(bytesWritable.getBytes(), 0, bytesWritable.getLength()); + BytesWritable bytesWritable = serializeWriteBytes[i]; + binarySortableDeserializeRead.set( + bytesWritable.getBytes(), 0, bytesWritable.getLength()); for (int index = 0; index < columnCount; index++) { if (useIncludeColumns && !columnsToInclude[index]) { binarySortableDeserializeRead.skipNextField(); } else if (index >= writeColumnCount) { // Should come back a null. - verifyRead(binarySortableDeserializeRead, typeInfos[index], null); + VerifyFast.verifyDeserializeRead(binarySortableDeserializeRead, typeInfos[index], null); } else { verifyRead(binarySortableDeserializeRead, typeInfos[index], row[index]); } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(binarySortableDeserializeRead.isEndOfInputReached()); + assertTrue(binarySortableDeserializeRead.isEndOfInputReached()); } + + /* + * Clip off one byte and expect to get an EOFException on the write field. + */ + BinarySortableDeserializeRead binarySortableDeserializeRead2 = + new BinarySortableDeserializeRead( + typeInfos, + /* useExternalBuffer */ false, + columnSortOrderIsDesc, + columnNullMarker, + columnNotNullMarker); + + binarySortableDeserializeRead2.set( + bytesWritable.getBytes(), 0, bytesWritable.getLength() - 1); // One fewer byte. 
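One subtlety in the truncation check that ends up inside the deserializeRead helper: the guard at the end of the loop below deliberately tolerates the no-exception case for nulls.

    // A buffer clipped by one byte can still legitimately decode its last field
    // as null without reading past the end of input, so an EOFException is only
    // demanded when the expected value is non-null -- hence the guard
    //
    //   if (!threw && row[index] != null) { Assert.fail(); }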
+ + for (int index = 0; index < writeColumnCount; index++) { + if (index == writeColumnCount - 1) { + boolean threw = false; + try { + verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]); + } catch (EOFException e) { +// debugDetailedReadPositionString = binarySortableDeserializeRead2.getDetailedReadPositionString(); +// debugStackTrace = e.getStackTrace(); + threw = true; + } + + if (!threw && row[index] != null) { + Assert.fail(); + } + } else { + if (useIncludeColumns && !columnsToInclude[index]) { + binarySortableDeserializeRead2.skipNextField(); + } else { + verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]); + } + } + } + } - } +} + +private void serializeWrite(Object[][] rows, boolean ascending, TypeInfo[] typeInfos, int rowCount, int columnCount, + int writeColumnCount, BinarySortableSerializeWrite binarySortableSerializeWrite, + BytesWritable[] serializeWriteBytes, int[][] perFieldWriteLengthsArray) throws IOException { + for (int i = 0; i < rowCount; i++) { + Object[] row = rows[i]; + Output output = new Output(); + binarySortableSerializeWrite.set(output); + + int[] perFieldWriteLengths = new int[columnCount]; + for (int index = 0; index < writeColumnCount; index++) { + VerifyFast.serializeWrite(binarySortableSerializeWrite, typeInfos[index], row[index]); + perFieldWriteLengths[index] = output.getLength(); + } + perFieldWriteLengthsArray[i] = perFieldWriteLengths; + + BytesWritable bytesWritable = new BytesWritable(); + bytesWritable.set(output.getData(), 0, output.getLength()); + serializeWriteBytes[i] = bytesWritable; + if (i > 0) { + BytesWritable previousBytesWritable = serializeWriteBytes[i - 1]; + int compareResult = previousBytesWritable.compareTo(bytesWritable); + if ((compareResult < 0 && !ascending) + || (compareResult > 0 && ascending)) { + System.out.println("Test failed in " + + (ascending ? 
"ascending" : "descending") + " order with " + + (i - 1) + " and " + i); + System.out.println("serialized data [" + (i - 1) + "] = " + + TestBinarySortableSerDe.hexString(serializeWriteBytes[i - 1])); + System.out.println("serialized data [" + i + "] = " + + TestBinarySortableSerDe.hexString(serializeWriteBytes[i])); + fail("Sort order of serialized " + (i - 1) + " and " + i + + " are reversed!"); + } + } + } +} private void verifyRead(BinarySortableDeserializeRead binarySortableDeserializeRead, TypeInfo typeInfo, Object expectedObject) throws IOException { @@ -319,7 +360,7 @@ private void verifyRead(BinarySortableDeserializeRead binarySortableDeserializeR Object complexFieldObj = VerifyFast.deserializeReadComplexType(binarySortableDeserializeRead, typeInfo); if (expectedObject == null) { if (complexFieldObj != null) { - TestCase.fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + + fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + ", " + complexFieldObj.toString() + ")"); } } else { @@ -331,12 +372,12 @@ private void verifyRead(BinarySortableDeserializeRead binarySortableDeserializeR return; } } - TestCase.fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + + fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + ", " + expectedObject.toString() + ")"); } } if (!VerifyLazy.lazyCompare(typeInfo, complexFieldObj, expectedObject)) { - TestCase.fail("Comparision failed typeInfo " + typeInfo.toString()); + fail("Comparision failed typeInfo " + typeInfo.toString()); } } } @@ -416,14 +457,14 @@ private void testBinarySortableFastCase( /* * Acending. */ - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, /* ascending */ true, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r); - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, @@ -431,14 +472,14 @@ private void testBinarySortableFastCase( /* useIncludeColumns */ true, /* doWriteFewerColumns */ false, r); if (doWriteFewerColumns) { - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, /* ascending */ true, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r); - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, @@ -451,14 +492,14 @@ private void testBinarySortableFastCase( */ Arrays.fill(columnSortOrderIsDesc, true); - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_descending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, /* ascending */ false, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r); - testBinarySortableFast(source, rows, + 
testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_descending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector, @@ -466,14 +507,14 @@ private void testBinarySortableFastCase( /* useIncludeColumns */ true, /* doWriteFewerColumns */ false, r); if (doWriteFewerColumns) { - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_descending, rowStructObjectInspector, serde_descending_fewer, writeRowStructObjectInspector, /* ascending */ false, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r); - testBinarySortableFast(source, rows, + testBinarySortableFastM(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker, serde_descending, rowStructObjectInspector, serde_descending_fewer, writeRowStructObjectInspector, @@ -500,14 +541,17 @@ public void testBinarySortableFast(SerdeRandomRowSource.SupportedTypes supported } } + @Test public void testBinarySortableFastPrimitive() throws Throwable { testBinarySortableFast(SerdeRandomRowSource.SupportedTypes.PRIMITIVE, 0); } + @Test public void testBinarySortableFastComplexDepthOne() throws Throwable { testBinarySortableFast(SerdeRandomRowSource.SupportedTypes.ALL_EXCEPT_MAP, 1); } + @Test public void testBinarySortableFastComplexDepthFour() throws Throwable { testBinarySortableFast(SerdeRandomRowSource.SupportedTypes.ALL_EXCEPT_MAP, 4); } @@ -519,4 +563,4 @@ private static String displayBytes(byte[] bytes, int start, int length) { } return sb.toString(); } -} \ No newline at end of file +} diff --git serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java index e4e0213759..e64dc8061e 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java +++ serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java @@ -34,13 +34,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.BytesWritable; -import junit.framework.TestCase; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestBinarySortableSerDe. 
* */ -public class TestBinarySortableSerDe extends TestCase { +public class TestBinarySortableSerDe { private static final String DECIMAL_CHARS = "0123456789"; @@ -135,6 +138,7 @@ public static void sort(Object[] structs, ObjectInspector oi) { } } + @Test public void testBinarySortableSerDe() throws Throwable { try { diff --git serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java index 08c7977c4c..70a3ba6e67 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java +++ serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java @@ -26,7 +26,7 @@ import java.util.TreeMap; import junit.framework.Assert; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; @@ -40,8 +40,14 @@ import org.apache.hadoop.hive.serde2.objectinspector.SimpleMapEqualComparer; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.LongWritable; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestLazyBinaryColumnarSerDe extends TestCase { +/** + * LazyBinaryColumnarSerDe Test. + */ +public class TestLazyBinaryColumnarSerDe { private static class InnerStruct { public InnerStruct(Integer i, Long l) { @@ -66,6 +72,7 @@ public InnerStruct(Integer i, Long l) { InnerStruct mStruct; } + @Test public void testSerDe() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA); @@ -107,6 +114,7 @@ public void testSerDe() throws SerDeException { } } + @Test public void testSerDeEmpties() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA); @@ -142,6 +150,7 @@ public void testSerDeEmpties() throws SerDeException { } } + @Test public void testLazyBinaryColumnarSerDeWithEmptyBinary() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA); @@ -175,6 +184,7 @@ public void testLazyBinaryColumnarSerDeWithEmptyBinary() throws SerDeException { assert false; } + @Test public void testSerDeOuterNulls() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA); @@ -199,6 +209,7 @@ public void testSerDeOuterNulls() throws SerDeException { } } + @Test public void testSerDeInnerNulls() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(OuterStruct.class, ObjectInspectorOptions.JAVA); @@ -261,6 +272,7 @@ public void testSerDeInnerNulls() throws SerDeException { * in the new schema, and seeing if this serde can to read both types of data from the resultant table. 
* @throws SerDeException */ + @Test public void testHandlingAlteredSchemas() throws SerDeException { StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory .getReflectionObjectInspector(BeforeStruct.class, @@ -322,4 +334,4 @@ public void testHandlingAlteredSchemas() throws SerDeException { Assert.assertEquals(((LongWritable) objs2.get(1)).get(), 12L); Assert.assertEquals(((LongWritable) objs2.get(2)).get(), 13L); } -} \ No newline at end of file +} diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java index 730764e516..d21a86b1c7 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java @@ -25,7 +25,7 @@ import java.util.Map.Entry; import java.util.Properties; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde2.SerDeException; @@ -41,12 +41,17 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.Test; /** * Tests LazyArray, LazyMap, LazyStruct and LazyUnion * */ -public class TestLazyArrayMapStruct extends TestCase { +public class TestLazyArrayMapStruct { // nesting level limits static final int EXTENDED_LEVEL_THRESHOLD = 24; @@ -55,6 +60,7 @@ /** * Test the LazyArray class. */ + @Test public void testLazyArray() throws Throwable { try { // Array of Byte @@ -133,6 +139,7 @@ public void testLazyArray() throws Throwable { /** * Test the LazyMap class. */ + @Test public void testLazyMap() throws Throwable { try { { @@ -199,6 +206,7 @@ public void testLazyMap() throws Throwable { * where '[' and ']' don't exist, only for notation purpose, * STX with value of 2 as entry separator, ETX with 3 as key/value separator * */ + @Test public void testLazyMapWithBadEntries() throws Throwable { try { { @@ -304,6 +312,7 @@ public void testLazyMapWithBadEntries() throws Throwable { /** * Test the LazyMap class. */ + @Test public void testLazyMapWithDuplicateKeys() throws Throwable { try { { @@ -367,6 +376,7 @@ public void testLazyMapWithDuplicateKeys() throws Throwable { /** * Test the LazyStruct class. */ + @Test public void testLazyStruct() throws Throwable { try { { @@ -448,6 +458,7 @@ public void testLazyStruct() throws Throwable { /** * Test the LazyUnion class. 
*/ + @Test public void testLazyUnion() throws Throwable { try { { @@ -516,6 +527,7 @@ public void testLazyUnion() throws Throwable { /** * Test the LazyArray class with multiple levels of nesting */ + @Test public void testLazyArrayNested() throws Throwable { for(int i = 2; i < EXTENDED_LEVEL_THRESHOLD; i++ ){ testNestedinArrayAtLevelExtended(i, ObjectInspector.Category.LIST); @@ -525,6 +537,7 @@ public void testLazyArrayNested() throws Throwable { /** * Test the LazyArray class with multiple levels of nesting */ + @Test public void testLazyArrayNestedExceedLimit() throws Throwable { checkExtendedLimitExceeded(EXTENDED_LEVEL_THRESHOLD, ObjectInspector.Category.LIST); } @@ -543,6 +556,7 @@ private void checkExtendedLimitExceeded(int maxLevel, Category type) { * Test the LazyArray class with multiple levels of nesting, when nesting * levels are not extended */ + @Test public void testLazyArrayNestedExceedLimitNotExtended() throws Throwable { checkNotExtendedLimitExceeded(DEFAULT_LEVEL_THRESHOLD, ObjectInspector.Category.LIST); @@ -552,6 +566,7 @@ public void testLazyArrayNestedExceedLimitNotExtended() throws Throwable { * Test the LazyMap class with multiple levels of nesting, when nesting * levels are not extended */ + @Test public void testLazyMapNestedExceedLimitNotExtended() throws Throwable { checkNotExtendedLimitExceeded(DEFAULT_LEVEL_THRESHOLD-1, ObjectInspector.Category.MAP); @@ -561,6 +576,7 @@ public void testLazyMapNestedExceedLimitNotExtended() throws Throwable { * Test the LazyMap class with multiple levels of nesting, when nesting * levels are not extended */ + @Test public void testLazyStructNestedExceedLimitNotExtended() throws Throwable { checkNotExtendedLimitExceeded(DEFAULT_LEVEL_THRESHOLD, ObjectInspector.Category.STRUCT); @@ -570,6 +586,7 @@ public void testLazyStructNestedExceedLimitNotExtended() throws Throwable { * Test the LazyMap class with multiple levels of nesting, when nesting * levels are not extended */ + @Test public void testLazyUnionNestedExceedLimitNotExtended() throws Throwable { checkNotExtendedLimitExceeded(DEFAULT_LEVEL_THRESHOLD, ObjectInspector.Category.UNION); @@ -588,6 +605,7 @@ private void checkNotExtendedLimitExceeded(int maxLevel, Category type) { /** * Test the LazyMap class with multiple levels of nesting */ + @Test public void testLazyMapNested() throws Throwable { //map max nesting level is one less because it uses an additional separator for(int i = 2; i < EXTENDED_LEVEL_THRESHOLD - 1; i++ ){ @@ -598,6 +616,7 @@ public void testLazyMapNested() throws Throwable { /** * Test the LazyMap class with multiple levels of nesting */ + @Test public void testLazyMapNestedExceedLimit() throws Throwable { //map max nesting level is one less because it uses an additional separator checkExtendedLimitExceeded(EXTENDED_LEVEL_THRESHOLD - 1, ObjectInspector.Category.MAP); @@ -606,6 +625,7 @@ public void testLazyMapNestedExceedLimit() throws Throwable { /** * Test the LazyUnion class with multiple levels of nesting */ + @Test public void testLazyUnionNested() throws Throwable { for(int i = 2; i < EXTENDED_LEVEL_THRESHOLD; i++ ){ testNestedinArrayAtLevelExtended(i, ObjectInspector.Category.UNION); @@ -615,6 +635,7 @@ public void testLazyUnionNested() throws Throwable { /** * Test the LazyUnion class with multiple levels of nesting */ + @Test public void testLazyUnionNestedExceedLimit() throws Throwable { checkExtendedLimitExceeded(EXTENDED_LEVEL_THRESHOLD, ObjectInspector.Category.UNION); } @@ -622,6 +643,7 @@ public void testLazyUnionNestedExceedLimit() 
throws Throwable { /** * Test the LazyStruct class with multiple levels of nesting */ + @Test public void testLazyStructNested() throws Throwable { for(int i = 2; i < EXTENDED_LEVEL_THRESHOLD; i++ ){ testNestedinArrayAtLevelExtended(i, ObjectInspector.Category.STRUCT); @@ -645,6 +667,7 @@ private void testNestedinArrayAtLevelExtended(int nestingLevel, /** * Test the LazyStruct class with multiple levels of nesting */ + @Test public void testLazyStructNestedExceedLimit() throws Throwable { checkExtendedLimitExceeded(EXTENDED_LEVEL_THRESHOLD, ObjectInspector.Category.STRUCT); } diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java index 79bf5fb092..828f17f0ce 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.serde2.lazy; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Timestamp; @@ -30,12 +30,16 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.junit.Test; /** * TestLazyPrimitive. * */ -public class TestLazyPrimitive extends TestCase { +public class TestLazyPrimitive { /** * Initialize the LazyObject with the parameters, wrapping the byte[] @@ -51,6 +55,7 @@ public static void initLazyObject(LazyObject lo, byte[] data, int start, /** * Test the LazyByte class. */ + @Test public void testLazyByte() throws Throwable { try { LazyByte b = new LazyByte( @@ -94,6 +99,7 @@ public void testLazyByte() throws Throwable { /** * Test the LazyShort class. */ + @Test public void testLazyShort() throws Throwable { try { LazyShort b = new LazyShort( @@ -141,6 +147,7 @@ public void testLazyShort() throws Throwable { /** * Test the LazyInteger class. */ + @Test public void testLazyInteger() throws Throwable { try { LazyInteger b = new LazyInteger( @@ -196,6 +203,7 @@ public void testLazyInteger() throws Throwable { /** * Test the LazyLong class. */ + @Test public void testLazyLong() throws Throwable { try { LazyLong b = new LazyLong( @@ -259,6 +267,7 @@ public void testLazyLong() throws Throwable { /** * Test the LazyDouble class. */ + @Test public void testLazyDouble() throws Throwable { try { LazyDouble b = new LazyDouble( @@ -359,6 +368,7 @@ public void testLazyDouble() throws Throwable { /** * Test the LazyString class. 
*/ + @Test public void testLazyString() throws Throwable { try { LazyString b = new LazyString(LazyPrimitiveObjectInspectorFactory @@ -376,6 +386,7 @@ public void testLazyString() throws Throwable { } } + @Test public void testLazyBinary() { LazyBinary ba = new LazyBinary(LazyPrimitiveObjectInspectorFactory.LAZY_BINARY_OBJECT_INSPECTOR); initLazyObject(ba, new byte[] {}, 0, 0); @@ -390,6 +401,7 @@ public void testLazyBinary() { assertEquals(new BytesWritable(new byte[] {'\n'}), ba.getWritableObject()); } + @Test public void testLazyTimestamp() throws Throwable { LazyTimestamp t = new LazyTimestamp(LazyPrimitiveObjectInspectorFactory.LAZY_TIMESTAMP_OBJECT_INSPECTOR); String nullDate = "NULL"; @@ -408,6 +420,7 @@ public void testLazyTimestamp() throws Throwable { assertEquals(true, t.isNull); } + @Test public void testLazyDate() throws Throwable { LazyDate t = new LazyDate(LazyPrimitiveObjectInspectorFactory.LAZY_DATE_OBJECT_INSPECTOR); String nullDate = "NULL"; @@ -427,6 +440,7 @@ public void testLazyDate() throws Throwable { } + @Test public void testLazyIntegerWrite() throws Throwable { try { ByteStream.Output out = new ByteStream.Output(); @@ -447,6 +461,7 @@ public void testLazyIntegerWrite() throws Throwable { } } + @Test public void testLazyLongWrite() throws Throwable { try { ByteStream.Output out = new ByteStream.Output(); @@ -501,6 +516,7 @@ private void testLongCaseWithFail(String strVal, boolean trim) { } } + @Test public void testLazyIntWithSpaces() throws Throwable { Object[][] casesWithoutSpaces = { {"0", 0}, @@ -583,6 +599,7 @@ public void testLazyIntWithSpaces() throws Throwable { } } + @Test public void testLazyLongWithSpaces() throws Throwable { Object[][] casesWithoutSpaces = { {"0", 0}, diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java index 34b51c8fa9..da6cfbd041 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java @@ -46,11 +46,17 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestLazySimpleFast extends TestCase { +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.Test; - private void testLazySimpleFast( +/** + * LazySimpleFast Test. 
+ */ +public class TestLazySimpleFast { + + private void testLazySimpleFastM( SerdeRandomRowSource source, Object[][] rows, LazySimpleSerDe serde, StructObjectInspector rowOI, LazySimpleSerDe serde_fewer, StructObjectInspector writeRowOI, @@ -122,7 +128,7 @@ private void testLazySimpleFast( } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(lazySimpleDeserializeRead.isEndOfInputReached()); + assertTrue(lazySimpleDeserializeRead.isEndOfInputReached()); } } @@ -197,7 +203,7 @@ private void testLazySimpleFast( } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(lazySimpleDeserializeRead.isEndOfInputReached()); + assertTrue(lazySimpleDeserializeRead.isEndOfInputReached()); } } } @@ -209,7 +215,7 @@ private void verifyReadNull(LazySimpleDeserializeRead lazySimpleDeserializeRead, } else { Object complexFieldObj = VerifyFast.deserializeReadComplexType(lazySimpleDeserializeRead, typeInfo); if (complexFieldObj != null) { - TestCase.fail("Field report not null but object is null"); + fail("Field reports not null but object is null"); } } } @@ -222,7 +228,8 @@ private void verifyRead(LazySimpleDeserializeRead lazySimpleDeserializeRead, Object complexFieldObj = VerifyFast.deserializeReadComplexType(lazySimpleDeserializeRead, typeInfo); if (expectedObject == null) { if (complexFieldObj != null) { - TestCase.fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + ", " + complexFieldObj.toString() + ")"); + fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + + ", " + complexFieldObj.toString() + ")"); } } else { if (complexFieldObj == null) { @@ -233,11 +240,12 @@ private void verifyRead(LazySimpleDeserializeRead lazySimpleDeserializeRead, return; } } - TestCase.fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + ", " + expectedObject.toString() + ")"); + fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + + ", " + expectedObject.toString() + ")"); } } if (!VerifyLazy.lazyCompare(typeInfo, complexFieldObj, expectedObject)) { - TestCase.fail("Comparision failed typeInfo " + typeInfo.toString()); + fail("Comparison failed typeInfo " + typeInfo.toString()); } } } @@ -333,14 +341,14 @@ private void testLazySimpleFast( } - testLazySimpleFast( + testLazySimpleFastM( source, rows, serde, rowStructObjectInspector, serde_fewer, writeRowStructObjectInspector, serdeParams, serdeParams_fewer, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r); - testLazySimpleFast( + testLazySimpleFastM( source, rows, serde, rowStructObjectInspector, serde_fewer, writeRowStructObjectInspector, @@ -348,14 +356,14 @@ public void testLazySimpleFastCase( /* useIncludeColumns */ true, /* doWriteFewerColumns */ false, r); if (doWriteFewerColumns) { - testLazySimpleFast( + testLazySimpleFastM( source, rows, serde, rowStructObjectInspector, serde_fewer, writeRowStructObjectInspector, serdeParams, serdeParams_fewer, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r); - testLazySimpleFast( + testLazySimpleFastM( source, rows, serde, rowStructObjectInspector, serde_fewer, writeRowStructObjectInspector, @@ -364,7 +372,7 @@ public void testLazySimpleFastCase( } } - public void testLazySimpleFast(SerdeRandomRowSource.SupportedTypes supportedTypes, int depth) throws Throwable { + public void testLazySimpleFastN(SerdeRandomRowSource.SupportedTypes supportedTypes, int depth) throws Throwable {
try { Random r = new Random(8322); @@ -381,18 +389,22 @@ public void testLazySimpleFast(SerdeRandomRowSource.SupportedTypes supportedType } } + @Test public void testLazyBinarySimplePrimitive() throws Throwable { - testLazySimpleFast(SerdeRandomRowSource.SupportedTypes.PRIMITIVE, 0); + testLazySimpleFastN(SerdeRandomRowSource.SupportedTypes.PRIMITIVE, 0); } + @Test public void testLazyBinarySimpleComplexDepthOne() throws Throwable { - testLazySimpleFast(SerdeRandomRowSource.SupportedTypes.ALL, 1); + testLazySimpleFastN(SerdeRandomRowSource.SupportedTypes.ALL, 1); } + @Test public void testLazyBinarySimpleComplexDepthFour() throws Throwable { - testLazySimpleFast(SerdeRandomRowSource.SupportedTypes.ALL, 4); + testLazySimpleFastN(SerdeRandomRowSource.SupportedTypes.ALL, 4); } + @Test public void testLazySimpleDeserializeRowEmptyArray() throws Throwable { HiveConf hconf = new HiveConf(); @@ -417,6 +429,6 @@ public void testLazySimpleDeserializeRowEmptyArray() throws Throwable { deserializeRead.set(bytes, 0, bytes.length); verifyRead(deserializeRead, typeInfos[0], Collections.emptyList()); verifyRead(deserializeRead, typeInfos[1], Collections.emptyList()); - TestCase.assertTrue(deserializeRead.isEndOfInputReached()); + assertTrue(deserializeRead.isEndOfInputReached()); } -} \ No newline at end of file +} diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java index d289205792..c697dcf5c7 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java @@ -22,7 +22,7 @@ import java.util.Properties; import java.util.Random; -import junit.framework.TestCase; + import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.conf.Configuration; @@ -46,16 +46,18 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.junit.Test; +import static org.junit.Assert.assertEquals; /** * TestLazySimpleSerDe. * */ -public class TestLazySimpleSerDe extends TestCase { +public class TestLazySimpleSerDe { /** * Test the LazySimpleSerDe class. */ + @Test public void testLazySimpleSerDe() throws Throwable { try { // Create the SerDe @@ -92,6 +94,7 @@ public void testLazySimpleSerDe() throws Throwable { /** * Test the LazySimpleSerDe class with LastColumnTakesRest option. */ + @Test public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable { try { // Create the SerDe @@ -121,6 +124,7 @@ public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable { /** * Test the LazySimpleSerDe class with extra columns. */ + @Test public void testLazySimpleSerDeExtraColumns() throws Throwable { try { // Create the SerDe @@ -149,6 +153,7 @@ public void testLazySimpleSerDeExtraColumns() throws Throwable { /** * Test the LazySimpleSerDe class with missing columns. 
*/ + @Test public void testLazySimpleSerDeMissingColumns() throws Throwable { try { // Create the SerDe diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java index a7873f2767..196d2b6c73 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.serde2.lazy.fast; -import junit.framework.TestCase; + import java.util.Properties; @@ -28,17 +28,21 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * Unit tests for LazySimpleDeserializeRead. * */ -public class TestLazySimpleDeserializeRead extends TestCase { +public class TestLazySimpleDeserializeRead { /** * Test for escaping. * */ + @Test public void testEscaping() throws Exception { HiveConf hconf = new HiveConf(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java index e95c6eb435..e3d960742b 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java @@ -22,7 +22,7 @@ import java.util.Arrays; import java.util.Random; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -39,10 +39,16 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.Test; -public class TestLazyBinaryFast extends TestCase { +/** + * TestLazyBinaryFast. + */ +public class TestLazyBinaryFast { - private void testLazyBinaryFast( + private void testLazyBinaryFastM( SerdeRandomRowSource source, Object[][] rows, AbstractSerDe serde, StructObjectInspector rowOI, AbstractSerDe serde_fewer, StructObjectInspector writeRowOI, @@ -51,7 +57,6 @@ private void testLazyBinaryFast( int rowCount = rows.length; int columnCount = typeInfos.length; - boolean[] columnsToInclude = null; if (useIncludeColumns) { columnsToInclude = new boolean[columnCount]; @@ -76,11 +81,9 @@ private void testLazyBinaryFast( Object[] row = rows[i]; Output output = new Output(); lazyBinarySerializeWrite.set(output); - for (int index = 0; index < writeColumnCount; index++) { VerifyFast.serializeWrite(lazyBinarySerializeWrite, typeInfos[index], row[index]); } - BytesWritable bytesWritable = new BytesWritable(); bytesWritable.set(output.getData(), 0, output.getLength()); serializeWriteBytes[i] = bytesWritable; @@ -89,14 +92,12 @@ private void testLazyBinaryFast( // Try to deserialize for (int i = 0; i < rowCount; i++) { Object[] row = rows[i]; - // Specifying the right type info length tells LazyBinaryDeserializeRead which is the last // column. 
LazyBinaryDeserializeRead lazyBinaryDeserializeRead = new LazyBinaryDeserializeRead( writeTypeInfos, /* useExternalBuffer */ false); - BytesWritable bytesWritable = serializeWriteBytes[i]; lazyBinaryDeserializeRead.set(bytesWritable.getBytes(), 0, bytesWritable.getLength()); @@ -111,7 +112,7 @@ private void testLazyBinaryFast( } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(lazyBinaryDeserializeRead.isEndOfInputReached()); + assertTrue(lazyBinaryDeserializeRead.isEndOfInputReached()); } } @@ -124,9 +125,7 @@ private void testLazyBinaryFast( } else { lazyBinaryStruct = (LazyBinaryStruct) serde.deserialize(bytesWritable); } - Object[] row = rows[i]; - for (int index = 0; index < writeColumnCount; index++) { TypeInfo typeInfo = typeInfos[index]; Object object = lazyBinaryStruct.getField(index); @@ -149,24 +148,20 @@ private void testLazyBinaryFast( Object[] serdeRow = new Object[writeColumnCount]; for (int i = 0; i < rowCount; i++) { Object[] row = rows[i]; - // LazyBinary seems to work better with an row object array instead of a Java object... for (int index = 0; index < writeColumnCount; index++) { serdeRow[index] = row[index]; } - BytesWritable serialized; if (doWriteFewerColumns) { serialized = (BytesWritable) serde_fewer.serialize(serdeRow, writeRowOI); } else { serialized = (BytesWritable) serde.serialize(serdeRow, rowOI); } - BytesWritable bytesWritable = new BytesWritable( Arrays.copyOfRange(serialized.getBytes(), 0, serialized.getLength())); byte[] bytes1 = bytesWritable.getBytes(); - BytesWritable lazySerializedWriteBytes = serializeWriteBytes[i]; byte[] bytes2 = Arrays.copyOfRange(lazySerializedWriteBytes.getBytes(), 0, lazySerializedWriteBytes.getLength()); if (bytes1.length != bytes2.length) { @@ -204,7 +199,7 @@ private void testLazyBinaryFast( } } if (writeColumnCount == columnCount) { - TestCase.assertTrue(lazyBinaryDeserializeRead.isEndOfInputReached()); + assertTrue(lazyBinaryDeserializeRead.isEndOfInputReached()); } } } @@ -217,7 +212,7 @@ private void verifyRead(LazyBinaryDeserializeRead lazyBinaryDeserializeRead, Object complexFieldObj = VerifyFast.deserializeReadComplexType(lazyBinaryDeserializeRead, typeInfo); if (expectedObject == null) { if (complexFieldObj != null) { - TestCase.fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + + fail("Field reports not null but object is null (class " + complexFieldObj.getClass().getName() + ", " + complexFieldObj.toString() + ")"); } } else { @@ -229,12 +224,12 @@ private void verifyRead(LazyBinaryDeserializeRead lazyBinaryDeserializeRead, return; } } - TestCase.fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + + fail("Field reports null but object is not null (class " + expectedObject.getClass().getName() + ", " + expectedObject.toString() + ")"); } } if (!VerifyLazy.lazyCompare(typeInfo, complexFieldObj, expectedObject)) { - TestCase.fail("Comparision failed typeInfo " + typeInfo.toString()); + fail("Comparison failed typeInfo " + typeInfo.toString()); } } } @@ -285,14 +280,14 @@ public void testLazyBinaryFastCase( serde_fewer = testLazyBinarySerDe.getSerDe(partialFieldNames, partialFieldTypes);; } - testLazyBinaryFast( + testLazyBinaryFastM( source, rows, serde, rowStructObjectInspector, serde_fewer, writeRowStructObjectInspector, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r); - testLazyBinaryFast( + testLazyBinaryFastM( source, rows, serde, rowStructObjectInspector, serde_fewer,
writeRowStructObjectInspector, @@ -303,14 +298,14 @@ public void testLazyBinaryFastCase( * Can the LazyBinary format really tolerate writing fewer columns? */ // if (doWriteFewerColumns) { - // testLazyBinaryFast( + // testLazyBinaryFastM( // source, rows, // serde, rowStructObjectInspector, // serde_fewer, writeRowStructObjectInspector, // primitiveTypeInfos, // /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r); - // testLazyBinaryFast( + // testLazyBinaryFastM( // source, rows, // serde, rowStructObjectInspector, // serde_fewer, writeRowStructObjectInspector, @@ -319,7 +314,7 @@ public void testLazyBinaryFastCase( // } } - private void testLazyBinaryFast(SerdeRandomRowSource.SupportedTypes supportedTypes, int depth) throws Throwable { + private void testLazyBinaryFastN(SerdeRandomRowSource.SupportedTypes supportedTypes, int depth) throws Throwable { try { Random r = new Random(9983); @@ -335,15 +330,18 @@ private void testLazyBinaryFast(SerdeRandomRowSource.SupportedTypes supportedTyp } } + @Test public void testLazyBinaryFastPrimitive() throws Throwable { - testLazyBinaryFast(SerdeRandomRowSource.SupportedTypes.PRIMITIVE, 0); + testLazyBinaryFastN(SerdeRandomRowSource.SupportedTypes.PRIMITIVE, 0); } + @Test public void testLazyBinaryFastComplexDepthOne() throws Throwable { - testLazyBinaryFast(SerdeRandomRowSource.SupportedTypes.ALL, 1); + testLazyBinaryFastN(SerdeRandomRowSource.SupportedTypes.ALL, 1); } + @Test public void testLazyBinaryFastComplexDepthFour() throws Throwable { - testLazyBinaryFast(SerdeRandomRowSource.SupportedTypes.ALL, 4); + testLazyBinaryFastN(SerdeRandomRowSource.SupportedTypes.ALL, 4); } } diff --git serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java index d120d274c0..60c95ff146 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java +++ serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java @@ -27,7 +27,7 @@ import java.util.Properties; import java.util.Random; -import junit.framework.TestCase; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -58,12 +58,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.BytesWritable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.Test; /** * TestLazyBinarySerDe. * */ -public class TestLazyBinarySerDe extends TestCase { +public class TestLazyBinarySerDe { /** * Generate a random struct array. 
@@ -424,6 +427,7 @@ void testLazyBinaryMap(Random r) throws Throwable { * * @throws Throwable */ + @Test public void testLazyBinarySerDe() throws Throwable { try { @@ -481,6 +485,7 @@ private BytesWritable getInputBytesWritable() { * JavaBinaryObjectInspector from input BytesWritable * @throws Throwable */ + @Test public void testJavaBinaryObjectInspector() throws Throwable { BytesWritable bW = getInputBytesWritable(); @@ -501,6 +506,7 @@ public void testJavaBinaryObjectInspector() throws Throwable { * WritableBinaryObjectInspector from input BytesWritable * @throws Throwable */ + @Test public void testWritableBinaryObjectInspector() throws Throwable { BytesWritable bW = getInputBytesWritable(); @@ -520,6 +526,7 @@ public void testWritableBinaryObjectInspector() throws Throwable { * LazyBinaryObjectInspector from input BytesWritable * @throws Throwable */ + @Test public void testLazyBinaryObjectInspector() throws Throwable { //create input ByteArrayRef diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java index 1598cdcee3..b343406d48 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java @@ -32,9 +32,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestCrossMapEqualComparer extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import org.junit.Test; + +/** + * CrossMapEqualComparer Test. + */ +public class TestCrossMapEqualComparer { public static class IntegerStringMapHolder { Map mMap; @@ -44,6 +50,7 @@ public IntegerStringMapHolder() { } } + @Test public void testSameType() { // empty maps IntegerStringMapHolder o1 = new IntegerStringMapHolder(); @@ -88,6 +95,7 @@ Object serializeAndDeserialize(TextStringMapHolder o1, StructObjectInspector oi1 return serde.deserialize(t); } + @Test public void testCompatibleType() throws SerDeException, IOException { // empty maps TextStringMapHolder o1 = new TextStringMapHolder(); @@ -141,6 +149,7 @@ Object serializeAndDeserialize(StringTextMapHolder o1, StructObjectInspector oi1 return serde.deserialize(t); } + @Test public void testIncompatibleType() throws SerDeException, IOException { // empty maps StringTextMapHolder o1 = new StringTextMapHolder(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestFullMapEqualComparer.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestFullMapEqualComparer.java index 41c010b499..6ccf212f82 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestFullMapEqualComparer.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestFullMapEqualComparer.java @@ -20,11 +20,16 @@ import java.util.Map; import java.util.TreeMap; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestFullMapEqualComparer extends TestCase { +/** + * FullMapEqualComparer Test. 
+ */ +public class TestFullMapEqualComparer { public static class IntegerIntegerMapHolder { Map mMap; @@ -34,6 +39,7 @@ public IntegerIntegerMapHolder() { } } + @Test public void testAntiSymmetry() { IntegerIntegerMapHolder o1 = new IntegerIntegerMapHolder(); IntegerIntegerMapHolder o2 = new IntegerIntegerMapHolder(); @@ -66,6 +72,7 @@ public void testAntiSymmetry() { } + @Test public void testTransitivity() { IntegerIntegerMapHolder o1 = new IntegerIntegerMapHolder(); IntegerIntegerMapHolder o2 = new IntegerIntegerMapHolder(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java index 175d453cef..f829f9412f 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java @@ -20,7 +20,7 @@ import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -42,177 +42,169 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import org.junit.Test; /** * TestObjectInspectorConverters. * */ -public class TestObjectInspectorConverters extends TestCase { +public class TestObjectInspectorConverters { + @Test public void testObjectInspectorConverters() throws Throwable { try { // Boolean - Converter booleanConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableBooleanObjectInspector); - assertEquals("BooleanConverter", new BooleanWritable(false), - booleanConverter.convert(Integer.valueOf(0))); - assertEquals("BooleanConverter", new BooleanWritable(true), - booleanConverter.convert(Integer.valueOf(1))); - assertEquals("BooleanConverter", null, booleanConverter.convert(null)); + convertBoolean(); // Byte - Converter byteConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableByteObjectInspector); - assertEquals("ByteConverter", new ByteWritable((byte) 0), byteConverter - .convert(Integer.valueOf(0))); - assertEquals("ByteConverter", new ByteWritable((byte) 1), byteConverter - .convert(Integer.valueOf(1))); - assertEquals("ByteConverter", null, byteConverter.convert(null)); + convertByte(); // Short - Converter shortConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableShortObjectInspector); - assertEquals("ShortConverter", new ShortWritable((short) 0), - shortConverter.convert(Integer.valueOf(0))); - assertEquals("ShortConverter", new ShortWritable((short) 1), - shortConverter.convert(Integer.valueOf(1))); - assertEquals("ShortConverter", null, shortConverter.convert(null)); + convertShort(); // Int - Converter intConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableIntObjectInspector); - assertEquals("IntConverter", new IntWritable(0), intConverter - .convert(Integer.valueOf(0))); - assertEquals("IntConverter", new IntWritable(1), intConverter - .convert(Integer.valueOf(1))); - 
assertEquals("IntConverter", null, intConverter.convert(null)); + convertInt(); // Long - Converter longConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableLongObjectInspector); - assertEquals("LongConverter", new LongWritable(0), longConverter - .convert(Integer.valueOf(0))); - assertEquals("LongConverter", new LongWritable(1), longConverter - .convert(Integer.valueOf(1))); - assertEquals("LongConverter", null, longConverter.convert(null)); + convertLong(); // Float - Converter floatConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableFloatObjectInspector); - assertEquals("LongConverter", new FloatWritable(0), floatConverter - .convert(Integer.valueOf(0))); - assertEquals("LongConverter", new FloatWritable(1), floatConverter - .convert(Integer.valueOf(1))); - assertEquals("LongConverter", null, floatConverter.convert(null)); + convertFloat(); // Double - Converter doubleConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableDoubleObjectInspector); - assertEquals("DoubleConverter", new DoubleWritable(0), doubleConverter - .convert(Integer.valueOf(0))); - assertEquals("DoubleConverter", new DoubleWritable(1), doubleConverter - .convert(Integer.valueOf(1))); - assertEquals("DoubleConverter", null, doubleConverter.convert(null)); + convertDouble(); // Char - Converter charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); - assertEquals("CharConverter", new HiveChar("TRUE", -1), charConverter - .convert(Boolean.valueOf(true))); - assertEquals("CharConverter", new HiveChar("FALSE", -1), charConverter - .convert(Boolean.valueOf(false))); + convertChar(); - charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); - assertEquals("CharConverter", new HiveCharWritable(new HiveChar("TRUE", -1)), charConverter - .convert(Boolean.valueOf(true))); - assertEquals("CharConverter", new HiveCharWritable(new HiveChar("FALSE", -1)), charConverter - .convert(Boolean.valueOf(false))); + // VarChar + convertVarChar(); - charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); - assertEquals("CharConverter", new HiveChar("0", -1), charConverter - .convert(Integer.valueOf(0))); - assertEquals("CharConverter", new HiveChar("1", -1), charConverter - .convert(Integer.valueOf(1))); + // Text + convertText(); - charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); - assertEquals("CharConverter", new HiveCharWritable(new HiveChar("0", -1)), charConverter - .convert(Integer.valueOf(0))); - assertEquals("CharConverter", new HiveCharWritable(new HiveChar("1", -1)), charConverter - .convert(Integer.valueOf(1))); + // Binary + converBinary(); - charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaStringObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); - 
assertEquals("CharConverter", new HiveChar("hive", -1), charConverter - .convert(String.valueOf("hive"))); + // Union + convertUnion(); - charConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaStringObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); - assertEquals("CharConverter", new HiveCharWritable(new HiveChar("hive", -1)), charConverter - .convert(String.valueOf("hive"))); + } catch (Throwable e) { + e.printStackTrace(); + throw e; + } - // VarChar - Converter varcharConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarchar("TRUE", -1), varcharConverter - .convert(Boolean.valueOf(true))); - assertEquals("VarCharConverter", new HiveVarchar("FALSE", -1), varcharConverter - .convert(Boolean.valueOf(false))); + } - varcharConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("TRUE", -1)), varcharConverter - .convert(Boolean.valueOf(true))); - assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("FALSE", -1)), varcharConverter - .convert(Boolean.valueOf(false))); +private void convertUnion() { + ArrayList fieldNames = new ArrayList(); + fieldNames.add("firstInteger"); + fieldNames.add("secondString"); + fieldNames.add("thirdBoolean"); + ArrayList fieldObjectInspectors = new ArrayList(); + fieldObjectInspectors + .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); + fieldObjectInspectors + .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + fieldObjectInspectors + .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); - varcharConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarchar("0", -1), varcharConverter - .convert(Integer.valueOf(0))); - assertEquals("VarCharConverter", new HiveVarchar("1", -1), varcharConverter - .convert(Integer.valueOf(1))); + ArrayList fieldNames2 = new ArrayList(); + fieldNames2.add("firstString"); + fieldNames2.add("secondInteger"); + fieldNames2.add("thirdBoolean"); + ArrayList fieldObjectInspectors2 = new ArrayList(); + fieldObjectInspectors2 + .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + fieldObjectInspectors2 + .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); + fieldObjectInspectors2 + .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); - varcharConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaIntObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("0", -1)), varcharConverter - .convert(Integer.valueOf(0))); - assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("1", -1)), varcharConverter - .convert(Integer.valueOf(1))); + Converter unionConverter0 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), + ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); - varcharConverter = 
ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaStringObjectInspector, - PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarchar("hive", -1), varcharConverter - .convert(String.valueOf("hive"))); + Object convertedObject0 = unionConverter0.convert(new StandardUnion((byte)0, 1)); + StandardUnion expectedObject0 = new StandardUnion(); + expectedObject0.setTag((byte) 0); + expectedObject0.setObject("1"); - varcharConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaStringObjectInspector, - PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); - assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("hive", -1)), varcharConverter - .convert(String.valueOf("hive"))); + assertEquals(expectedObject0, convertedObject0); - // Text - Converter textConverter = ObjectInspectorConverters.getConverter( + Converter unionConverter1 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), + ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); + + Object convertedObject1 = unionConverter1.convert(new StandardUnion((byte)1, "1")); + StandardUnion expectedObject1 = new StandardUnion(); + expectedObject1.setTag((byte) 1); + expectedObject1.setObject(1); + + assertEquals(expectedObject1, convertedObject1); + + Converter unionConverter2 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), + ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); + + Object convertedObject2 = unionConverter2.convert(new StandardUnion((byte)2, true)); + StandardUnion expectedObject2 = new StandardUnion(); + expectedObject2.setTag((byte) 2); + expectedObject2.setObject(true); + + assertEquals(expectedObject2, convertedObject2); + + // Union (extra fields) + ArrayList fieldNamesExtra = new ArrayList(); + fieldNamesExtra.add("firstInteger"); + fieldNamesExtra.add("secondString"); + fieldNamesExtra.add("thirdBoolean"); + ArrayList fieldObjectInspectorsExtra = new ArrayList(); + fieldObjectInspectorsExtra + .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); + fieldObjectInspectorsExtra + .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + fieldObjectInspectorsExtra + .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); + + ArrayList fieldNamesExtra2 = new ArrayList(); + fieldNamesExtra2.add("firstString"); + fieldNamesExtra2.add("secondInteger"); + ArrayList fieldObjectInspectorsExtra2 = new ArrayList(); + fieldObjectInspectorsExtra2 + .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); + fieldObjectInspectorsExtra2 + .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); + + Converter unionConverterExtra = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra), + ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra2)); + + Object convertedObjectExtra = unionConverterExtra.convert(new StandardUnion((byte)2, true)); + StandardUnion expectedObjectExtra = new StandardUnion(); + expectedObjectExtra.setTag((byte) -1); + expectedObjectExtra.setObject(null); + + assertEquals(expectedObjectExtra, convertedObjectExtra); // we should get back null +} + +private void convertBinary() { + 
PrimitiveObjectInspectorFactory.javaStringObjectInspector, + PrimitiveObjectInspectorFactory.writableBinaryObjectInspector); + assertEquals("BAConverter", new BytesWritable(new byte[] + {(byte)'h', (byte)'i',(byte)'v',(byte)'e'}), + baConverter.convert("hive")); + assertEquals("BAConverter", null, baConverter.convert(null)); + + baConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.writableStringObjectInspector, + PrimitiveObjectInspectorFactory.writableBinaryObjectInspector); + assertEquals("BAConverter", new BytesWritable(new byte[] + {(byte)'h', (byte)'i',(byte)'v',(byte)'e'}), + baConverter.convert(new Text("hive"))); + assertEquals("BAConverter", null, baConverter.convert(null)); +} + +private void convertText() { + Converter textConverter = ObjectInspectorConverters.getConverter( PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector); assertEquals("TextConverter", new Text("0"), textConverter @@ -249,6 +241,52 @@ public void testObjectInspectorConverters() throws Throwable { assertEquals("TextConverter", new Text("100.001000000000000000"), textConverter .convert(HiveDecimal.create("100.001"))); assertEquals("TextConverter", null, textConverter.convert(null)); +} + +private void convertVarChar() { + Converter varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarchar("TRUE", -1), varcharConverter + .convert(Boolean.valueOf(true))); + assertEquals("VarCharConverter", new HiveVarchar("FALSE", -1), varcharConverter + .convert(Boolean.valueOf(false))); + + varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("TRUE", -1)), varcharConverter + .convert(Boolean.valueOf(true))); + assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("FALSE", -1)), varcharConverter + .convert(Boolean.valueOf(false))); + + varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarchar("0", -1), varcharConverter + .convert(Integer.valueOf(0))); + assertEquals("VarCharConverter", new HiveVarchar("1", -1), varcharConverter + .convert(Integer.valueOf(1))); + + varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("0", -1)), varcharConverter + .convert(Integer.valueOf(0))); + assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("1", -1)), varcharConverter + .convert(Integer.valueOf(1))); + + varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaStringObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarchar("hive", -1), varcharConverter + .convert(String.valueOf("hive"))); + + varcharConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaStringObjectInspector, + 
PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector); + assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("hive", -1)), varcharConverter + .convert(String.valueOf("hive"))); // Varchar PrimitiveTypeInfo varchar5TI = @@ -271,6 +309,52 @@ public void testObjectInspectorConverters() throws Throwable { varchar30OI); assertEquals("VarcharConverter", "100.001000000000000000", varcharConverter.convert(HiveDecimal.create("100.001")).toString()); +} + +private void convertChar() { + Converter charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); + assertEquals("CharConverter", new HiveChar("TRUE", -1), charConverter + .convert(Boolean.valueOf(true))); + assertEquals("CharConverter", new HiveChar("FALSE", -1), charConverter + .convert(Boolean.valueOf(false))); + + charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); + assertEquals("CharConverter", new HiveCharWritable(new HiveChar("TRUE", -1)), charConverter + .convert(Boolean.valueOf(true))); + assertEquals("CharConverter", new HiveCharWritable(new HiveChar("FALSE", -1)), charConverter + .convert(Boolean.valueOf(false))); + + charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); + assertEquals("CharConverter", new HiveChar("0", -1), charConverter + .convert(Integer.valueOf(0))); + assertEquals("CharConverter", new HiveChar("1", -1), charConverter + .convert(Integer.valueOf(1))); + + charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); + assertEquals("CharConverter", new HiveCharWritable(new HiveChar("0", -1)), charConverter + .convert(Integer.valueOf(0))); + assertEquals("CharConverter", new HiveCharWritable(new HiveChar("1", -1)), charConverter + .convert(Integer.valueOf(1))); + + charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaStringObjectInspector, + PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector); + assertEquals("CharConverter", new HiveChar("hive", -1), charConverter + .convert(String.valueOf("hive"))); + + charConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaStringObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); + assertEquals("CharConverter", new HiveCharWritable(new HiveChar("hive", -1)), charConverter + .convert(String.valueOf("hive"))); // Char PrimitiveTypeInfo char5TI = @@ -293,118 +377,86 @@ public void testObjectInspectorConverters() throws Throwable { char30OI); assertEquals("CharConverter", "100.001000000000000000 ", charConverter.convert(HiveDecimal.create("100.001")).toString()); +} - // Binary - Converter baConverter = ObjectInspectorConverters.getConverter( - PrimitiveObjectInspectorFactory.javaStringObjectInspector, - PrimitiveObjectInspectorFactory.writableBinaryObjectInspector); - assertEquals("BAConverter", new BytesWritable(new byte[] - {(byte)'h', (byte)'i',(byte)'v',(byte)'e'}), - baConverter.convert("hive")); - assertEquals("BAConverter", null, baConverter.convert(null)); - - baConverter = ObjectInspectorConverters.getConverter( - 
PrimitiveObjectInspectorFactory.writableStringObjectInspector, - PrimitiveObjectInspectorFactory.writableBinaryObjectInspector); - assertEquals("BAConverter", new BytesWritable(new byte[] - {(byte)'h', (byte)'i',(byte)'v',(byte)'e'}), - baConverter.convert(new Text("hive"))); - assertEquals("BAConverter", null, baConverter.convert(null)); - - // Union - ArrayList fieldNames = new ArrayList(); - fieldNames.add("firstInteger"); - fieldNames.add("secondString"); - fieldNames.add("thirdBoolean"); - ArrayList fieldObjectInspectors = new ArrayList(); - fieldObjectInspectors - .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); - fieldObjectInspectors - .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - fieldObjectInspectors - .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); - - ArrayList fieldNames2 = new ArrayList(); - fieldNames2.add("firstString"); - fieldNames2.add("secondInteger"); - fieldNames2.add("thirdBoolean"); - ArrayList fieldObjectInspectors2 = new ArrayList(); - fieldObjectInspectors2 - .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - fieldObjectInspectors2 - .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); - fieldObjectInspectors2 - .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); - - Converter unionConverter0 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), - ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); - - Object convertedObject0 = unionConverter0.convert(new StandardUnion((byte)0, 1)); - StandardUnion expectedObject0 = new StandardUnion(); - expectedObject0.setTag((byte) 0); - expectedObject0.setObject("1"); - - assertEquals(expectedObject0, convertedObject0); - - Converter unionConverter1 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), - ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); - - Object convertedObject1 = unionConverter1.convert(new StandardUnion((byte)1, "1")); - StandardUnion expectedObject1 = new StandardUnion(); - expectedObject1.setTag((byte) 1); - expectedObject1.setObject(1); - - assertEquals(expectedObject1, convertedObject1); - - Converter unionConverter2 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), - ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2)); - - Object convertedObject2 = unionConverter2.convert(new StandardUnion((byte)2, true)); - StandardUnion expectedObject2 = new StandardUnion(); - expectedObject2.setTag((byte) 2); - expectedObject2.setObject(true); - - assertEquals(expectedObject2, convertedObject2); - - // Union (extra fields) - ArrayList fieldNamesExtra = new ArrayList(); - fieldNamesExtra.add("firstInteger"); - fieldNamesExtra.add("secondString"); - fieldNamesExtra.add("thirdBoolean"); - ArrayList fieldObjectInspectorsExtra = new ArrayList(); - fieldObjectInspectorsExtra - .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); - fieldObjectInspectorsExtra - .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - fieldObjectInspectorsExtra - .add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector); +private void convertDouble() { + Converter doubleConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + 
PrimitiveObjectInspectorFactory.writableDoubleObjectInspector); + assertEquals("DoubleConverter", new DoubleWritable(0), doubleConverter + .convert(Integer.valueOf(0))); + assertEquals("DoubleConverter", new DoubleWritable(1), doubleConverter + .convert(Integer.valueOf(1))); + assertEquals("DoubleConverter", null, doubleConverter.convert(null)); +} - ArrayList fieldNamesExtra2 = new ArrayList(); - fieldNamesExtra2.add("firstString"); - fieldNamesExtra2.add("secondInteger"); - ArrayList fieldObjectInspectorsExtra2 = new ArrayList(); - fieldObjectInspectorsExtra2 - .add(PrimitiveObjectInspectorFactory.javaStringObjectInspector); - fieldObjectInspectorsExtra2 - .add(PrimitiveObjectInspectorFactory.javaIntObjectInspector); +private void convertFloat() { + Converter floatConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableFloatObjectInspector); + assertEquals("FloatConverter", new FloatWritable(0), floatConverter + .convert(Integer.valueOf(0))); + assertEquals("FloatConverter", new FloatWritable(1), floatConverter + .convert(Integer.valueOf(1))); + assertEquals("FloatConverter", null, floatConverter.convert(null)); +} - Converter unionConverterExtra = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra), - ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra2)); +private void convertLong() { + Converter longConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableLongObjectInspector); + assertEquals("LongConverter", new LongWritable(0), longConverter + .convert(Integer.valueOf(0))); + assertEquals("LongConverter", new LongWritable(1), longConverter + .convert(Integer.valueOf(1))); + assertEquals("LongConverter", null, longConverter.convert(null)); +} - Object convertedObjectExtra = unionConverterExtra.convert(new StandardUnion((byte)2, true)); - StandardUnion expectedObjectExtra = new StandardUnion(); - expectedObjectExtra.setTag((byte) -1); - expectedObjectExtra.setObject(null); +private void convertInt() { + Converter intConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableIntObjectInspector); + assertEquals("IntConverter", new IntWritable(0), intConverter + .convert(Integer.valueOf(0))); + assertEquals("IntConverter", new IntWritable(1), intConverter + .convert(Integer.valueOf(1))); + assertEquals("IntConverter", null, intConverter.convert(null)); +} - assertEquals(expectedObjectExtra, convertedObjectExtra); // we should get back null +private void convertShort() { + Converter shortConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableShortObjectInspector); + assertEquals("ShortConverter", new ShortWritable((short) 0), + shortConverter.convert(Integer.valueOf(0))); + assertEquals("ShortConverter", new ShortWritable((short) 1), + shortConverter.convert(Integer.valueOf(1))); + assertEquals("ShortConverter", null, shortConverter.convert(null)); +} - } catch (Throwable e) { - e.printStackTrace(); - throw e; - } +private void convertByte() { + Converter byteConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + 
PrimitiveObjectInspectorFactory.writableByteObjectInspector); + assertEquals("ByteConverter", new ByteWritable((byte) 0), byteConverter + .convert(Integer.valueOf(0))); + assertEquals("ByteConverter", new ByteWritable((byte) 1), byteConverter + .convert(Integer.valueOf(1))); + assertEquals("ByteConverter", null, byteConverter.convert(null)); +} - } +private void convertBoolean() { + Converter booleanConverter = ObjectInspectorConverters.getConverter( + PrimitiveObjectInspectorFactory.javaIntObjectInspector, + PrimitiveObjectInspectorFactory.writableBooleanObjectInspector); + assertEquals("BooleanConverter", new BooleanWritable(false), + booleanConverter.convert(Integer.valueOf(0))); + assertEquals("BooleanConverter", new BooleanWritable(true), + booleanConverter.convert(Integer.valueOf(1))); + assertEquals("BooleanConverter", null, booleanConverter.convert(null)); +} + @Test public void testGetConvertedOI() throws Throwable { // Try with types that have type params PrimitiveTypeInfo varchar5TI = @@ -422,4 +474,4 @@ public void testGetConvertedOI() throws Throwable { VarcharTypeInfo vcParams = (VarcharTypeInfo) poi.getTypeInfo(); assertEquals("varchar length doesn't match", 5, vcParams.getLength()); } -} \ No newline at end of file +} diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java index 23ca194b6f..0e88c32c25 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java @@ -21,19 +21,23 @@ import java.util.Arrays; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.thrift.test.Complex; import org.apache.hadoop.hive.serde2.thrift.test.IntString; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; /** * TestObjectInspectorUtils. 
* */ -public class TestObjectInspectorUtils extends TestCase { +public class TestObjectInspectorUtils { + @Test public void testCompareFloatingNumberSignedZero() { PrimitiveObjectInspector doubleOI = PrimitiveObjectInspectorFactory .getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.DOUBLE); @@ -56,6 +60,7 @@ public void testCompareFloatingNumberSignedZero() { assertEquals(0, ObjectInspectorUtils.compare(f2, floatOI, f2, floatOI)); } + @Test public void testObjectInspectorUtils() throws Throwable { try { ObjectInspector oi1 = ObjectInspectorFactory @@ -131,6 +136,7 @@ public void testObjectInspectorUtils() throws Throwable { } } + @Test public void testBucketIdGeneration() { ArrayList fieldNames = new ArrayList(); fieldNames.add("firstInteger"); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestProtocolBuffersObjectInspectors.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestProtocolBuffersObjectInspectors.java index b4efdf865f..374206592d 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestProtocolBuffersObjectInspectors.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestProtocolBuffersObjectInspectors.java @@ -21,19 +21,23 @@ import java.util.Arrays; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex; import org.apache.hadoop.hive.serde2.proto.test.Complexpb.IntString; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; /** * TestProtocolBuffersObjectInspectors. * */ -public class TestProtocolBuffersObjectInspectors extends TestCase { +public class TestProtocolBuffersObjectInspectors { + @Test public void testProtocolBuffersObjectInspectors() throws Throwable { try { diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java index 2faf34002a..251b31af65 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java @@ -35,17 +35,20 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.thrift.test.Complex; -import org.junit.Test; - import com.google.common.collect.Lists; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.Test; /** * TestReflectionObjectInspectors. 
* */ -public class TestReflectionObjectInspectors extends TestCase { +public class TestReflectionObjectInspectors { @Test public void testReflectionObjectInspectors() throws Throwable { diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java index de5ae6834b..8048f37d6d 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java @@ -32,9 +32,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions; import org.apache.hadoop.io.Text; -import junit.framework.TestCase; -public class TestSimpleMapEqualComparer extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import org.junit.Test; + +/** + * SimpleMapEqualComparer Test. + */ +public class TestSimpleMapEqualComparer { public static class IntegerStringMapHolder { Map mMap; @@ -44,6 +50,7 @@ public IntegerStringMapHolder() { } } + @Test public void testSameType() { // empty maps IntegerStringMapHolder o1 = new IntegerStringMapHolder(); @@ -88,6 +95,7 @@ Object serializeAndDeserialize(TextStringMapHolder o1, StructObjectInspector oi1 return serde.deserialize(t); } + @Test public void testCompatibleType() throws SerDeException, IOException { // empty maps TextStringMapHolder o1 = new TextStringMapHolder(); @@ -141,6 +149,7 @@ Object serializeAndDeserialize(StringTextMapHolder o1, StructObjectInspector oi1 return serde.deserialize(t); } + @Test public void testIncompatibleType() throws SerDeException, IOException { // empty maps StringTextMapHolder o1 = new StringTextMapHolder(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java index 2c488b0945..04ffbb8906 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java @@ -21,7 +21,7 @@ import java.util.HashMap; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -47,12 +47,18 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import org.junit.Test; /** * TestStandardObjectInspectors. 
* */ -public class TestStandardObjectInspectors extends TestCase { +public class TestStandardObjectInspectors { void doTestStandardPrimitiveObjectInspector(Class writableClass, Class javaClass) throws Throwable { @@ -82,6 +88,7 @@ void doTestStandardPrimitiveObjectInspector(Class writableClass, } } + @Test public void testStandardPrimitiveObjectInspector() throws Throwable { try { doTestStandardPrimitiveObjectInspector(NullWritable.class, Void.class); @@ -136,6 +143,7 @@ void doTestJavaPrimitiveObjectInspector(Class writableClass, } } + @Test public void testJavaPrimitiveObjectInspector() throws Throwable { try { doTestJavaPrimitiveObjectInspector(NullWritable.class, Void.class, null); @@ -163,6 +171,7 @@ public void testJavaPrimitiveObjectInspector() throws Throwable { } } + @Test public void testStandardListObjectInspector() throws Throwable { try { StandardListObjectInspector loi1 = ObjectInspectorFactory @@ -218,6 +227,7 @@ public void testStandardListObjectInspector() throws Throwable { } + @Test public void testStandardMapObjectInspector() throws Throwable { try { StandardMapObjectInspector moi1 = ObjectInspectorFactory @@ -279,6 +289,7 @@ public void testStandardMapObjectInspector() throws Throwable { } @SuppressWarnings("unchecked") + @Test public void testStandardStructObjectInspector() throws Throwable { try { // Test StandardObjectInspector both with field comments and without @@ -383,6 +394,7 @@ private void doStandardObjectInspectorTest(boolean testComments) { } @SuppressWarnings("unchecked") + @Test public void testStandardUnionObjectInspector() throws Throwable { try { ArrayList objectInspectors = new ArrayList(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java index de291fa6d8..9fefd665bf 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java @@ -30,14 +30,19 @@ import org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion; import org.apache.hadoop.hive.serde2.thrift.test.SetIntString; -import junit.framework.TestCase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertNotNull; +import org.junit.Test; /** * TestThriftObjectInspectors. 
* */ -public class TestThriftObjectInspectors extends TestCase { +public class TestThriftObjectInspectors { + @Test public void testThriftObjectInspectors() throws Throwable { try { @@ -129,6 +134,7 @@ public void testThriftObjectInspectors() throws Throwable { } @SuppressWarnings("unchecked") + @Test public void testThriftSetObjectInspector() throws Throwable { try { diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java index efc96931ca..d785f6264a 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java @@ -21,17 +21,21 @@ import java.util.Arrays; import java.util.List; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import org.junit.Test; /** * TestUnionStructObjectInspector. * */ -public class TestUnionStructObjectInspector extends TestCase { +public class TestUnionStructObjectInspector { + @Test public void testUnionStructObjectInspector() throws Throwable { try { ArrayList fieldNames1 = new ArrayList(); diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java index 022b642cf8..1b9237baa8 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorFactory.java @@ -18,13 +18,19 @@ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import static org.junit.Assert.assertEquals; +import org.junit.Test; -public class TestPrimitiveObjectInspectorFactory extends TestCase { +/** + * PrimitiveObjectInspectorFactory Test. 
+ */ +public class TestPrimitiveObjectInspectorFactory { + @Test public void testGetPrimitiveWritableObjectInspector() { // even without type params, return a default OI for varchar PrimitiveObjectInspector poi = PrimitiveObjectInspectorFactory @@ -37,6 +43,7 @@ public void testGetPrimitiveWritableObjectInspector() { assertEquals(poi, PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector); } + @Test public void testGetPrimitiveJavaObjectInspector() { // even without type params, return a default OI for varchar PrimitiveObjectInspector poi = PrimitiveObjectInspectorFactory diff --git serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java index c731a579b7..25c3f6001d 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java +++ serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java @@ -31,11 +31,16 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; -import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import org.junit.Test; -public class TestPrimitiveObjectInspectorUtils extends TestCase { +/** + * PrimitiveObjectInspectorUtils Test. + */ +public class TestPrimitiveObjectInspectorUtils { @Test public void testGetPrimitiveGrouping() { diff --git serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDate.java serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDate.java index af81fe30c8..6d5a78be58 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDate.java +++ serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDate.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.teradata; import com.google.common.io.BaseEncoding; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.DateWritableV2; @@ -29,21 +29,25 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; +import org.junit.Before; +import org.junit.Test; /** * Test the data type DATE for Teradata binary format. 
*/ -public class TestTeradataBinarySerdeForDate extends TestCase { +public class TestTeradataBinarySerdeForDate { private final TeradataBinarySerde serde = new TeradataBinarySerde(); private final Properties props = new Properties(); - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { props.setProperty(serdeConstants.LIST_COLUMNS, "TD_DATE"); props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "date"); serde.initialize(null, props); } + @Test public void testTimestampBefore1900() throws Exception { //0060-01-01 @@ -59,6 +63,7 @@ public void testTimestampBefore1900() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testTimestampAfter1900() throws Exception { //9999-01-01 diff --git serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDecimal.java serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDecimal.java index 6abdd3f722..28e717e4a7 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDecimal.java +++ serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForDecimal.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.teradata; import com.google.common.io.BaseEncoding; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.io.BytesWritable; @@ -28,22 +28,26 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; +import org.junit.Before; +import org.junit.Test; /** * Test the data type DECIMAL for Teradata binary format. */ -public class TestTeradataBinarySerdeForDecimal extends TestCase { +public class TestTeradataBinarySerdeForDecimal { private final TeradataBinarySerde serde = new TeradataBinarySerde(); private final Properties props = new Properties(); - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { props.setProperty(serdeConstants.LIST_COLUMNS, "TD_DECIMAL"); props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "decimal(9,5)"); serde.initialize(null, props); } + @Test public void testPositiveFraction() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("0064000000")); @@ -54,6 +58,7 @@ public void testPositiveFraction() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testNegativeFraction() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("009cffffff")); @@ -64,6 +69,7 @@ public void testNegativeFraction() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testPositiveNumber1() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("00a0860100")); @@ -74,6 +80,7 @@ public void testPositiveNumber1() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testNegativeNumber1() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("006079feff")); @@ -84,6 +91,7 @@ public void testNegativeNumber1() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testPositiveNumber2() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("0080969800")); 
@@ -94,6 +102,7 @@ public void testPositiveNumber2() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testNegativeNumber2() throws Exception { BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("000065c4e0")); diff --git serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForTimeStamp.java serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForTimeStamp.java index a6cf2c14e9..fa5d1cab92 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForTimeStamp.java +++ serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeForTimeStamp.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.teradata; import com.google.common.io.BaseEncoding; -import junit.framework.TestCase; + import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; @@ -29,20 +29,24 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; +import org.junit.Before; +import org.junit.Test; /** * Test the data type TIMESTAMP for Teradata binary format. */ -public class TestTeradataBinarySerdeForTimeStamp extends TestCase { +public class TestTeradataBinarySerdeForTimeStamp { private final TeradataBinarySerde serde = new TeradataBinarySerde(); private final Properties props = new Properties(); - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { props.setProperty(serdeConstants.LIST_COLUMNS, "TD_TIMESTAMP"); props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "timestamp"); } + @Test public void testTimestampPrecision6() throws Exception { props.setProperty(TeradataBinarySerde.TD_TIMESTAMP_PRECISION, "6"); serde.initialize(null, props); @@ -65,6 +69,7 @@ public void testTimestampPrecision6() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testTimestampPrecision0() throws Exception { props.setProperty(TeradataBinarySerde.TD_TIMESTAMP_PRECISION, "0"); serde.initialize(null, props); @@ -87,6 +92,7 @@ public void testTimestampPrecision0() throws Exception { Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes())); } + @Test public void testTimestampPrecision3() throws Exception { props.setProperty(TeradataBinarySerde.TD_TIMESTAMP_PRECISION, "3"); serde.initialize(null, props); diff --git serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeGeneral.java serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeGeneral.java index c50ef7082d..e392a989ae 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeGeneral.java +++ serde/src/test/org/apache/hadoop/hive/serde2/teradata/TestTeradataBinarySerdeGeneral.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.teradata; import com.google.common.io.BaseEncoding; -import junit.framework.TestCase; + import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritableV2; @@ -37,16 +37,19 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; +import org.junit.Before; +import org.junit.Test; /** * Test all the data types supported for Teradata Binary Format. 
 */
-public class TestTeradataBinarySerdeGeneral extends TestCase {
+public class TestTeradataBinarySerdeGeneral {

   private final TeradataBinarySerde serde = new TeradataBinarySerde();
   private final Properties props = new Properties();

-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     props.setProperty(serdeConstants.LIST_COLUMNS,
         "TD_CHAR, TD_VARCHAR, TD_BIGINT, TD_INT, TD_SMALLINT, TD_BYTEINT, "
             + "TD_FLOAT,TD_DECIMAL,TD_DATE, TD_TIMESTAMP, TD_VARBYTE");
@@ -56,6 +59,7 @@ protected void setUp() throws Exception {
     serde.initialize(null, props);
   }

+  @Test
   public void testDeserializeAndSerialize() throws Exception {
     BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
         "00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feffff"
@@ -81,6 +85,7 @@ public void testDeserializeAndSerialize() throws Exception {
     Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
   }

+  @Test
   public void testDeserializeAndSerializeWithNull() throws Exception {
     //null bitmap: 0160 -> 00000001 01100000, 7th, 9th, 10th is null
     BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
@@ -98,6 +103,7 @@ public void testDeserializeAndSerializeWithNull() throws Exception {
     Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
   }

+  @Test
   public void testDeserializeAndSerializeAllNull() throws Exception {
     BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
         "ffe0202020202020202020000000000000000000000000000000000000000000000000000000000000000000000000000000000"
@@ -119,6 +125,7 @@ public void testDeserializeAndSerializeAllNull() throws Exception {
     Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
   }

+  @Test
   public void testDeserializeCorruptedRecord() throws Exception {
     BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
         "00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feff"
diff --git serde/src/test/org/apache/hadoop/hive/serde2/typeinfo/TestTypeInfoUtils.java serde/src/test/org/apache/hadoop/hive/serde2/typeinfo/TestTypeInfoUtils.java
index 5f8ff7b7c1..db25db2195 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/typeinfo/TestTypeInfoUtils.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/typeinfo/TestTypeInfoUtils.java
@@ -18,12 +18,17 @@
 package org.apache.hadoop.hive.serde2.typeinfo;

-import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import static org.junit.Assert.assertEquals;
+import org.junit.Test;

-public class TestTypeInfoUtils extends TestCase {
+/**
+ * TypeInfoUtils Test.
+ */
+public class TestTypeInfoUtils {

   static void parseTypeString(String typeString, boolean exceptionExpected) {
     boolean caughtException = false;
@@ -35,6 +40,7 @@ static void parseTypeString(String typeString, boolean exceptionExpected) {
     assertEquals("parsing typestring " + typeString, exceptionExpected, caughtException);
   }

+  @Test
   public void testTypeInfoParser() {
     String[] validTypeStrings = {
         "int",
@@ -65,6 +71,7 @@ public void testTypeInfoParser() {
     }
   }

+  @Test
   public void testQualifiedTypeNoParams() {
     boolean caughtException = false;
     try {
@@ -94,6 +101,7 @@ public DecimalTestCase(String typeString, int expectedPrecision, int expectedSca
     }
   }

+  @Test
   public void testDecimal() {
     DecimalTestCase[] testCases = {
         new DecimalTestCase("decimal", 10, 0),
diff --git service/src/test/org/apache/hive/http/TestJdbcJarDownloadServlet.java service/src/test/org/apache/hive/http/TestJdbcJarDownloadServlet.java
index f7ad1fdddf..521761013a 100644
--- service/src/test/org/apache/hive/http/TestJdbcJarDownloadServlet.java
+++ service/src/test/org/apache/hive/http/TestJdbcJarDownloadServlet.java
@@ -23,14 +23,18 @@

 import javax.servlet.http.HttpServletResponse;

-import org.junit.Test;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;

-import junit.framework.TestCase;

-public class TestJdbcJarDownloadServlet extends TestCase {
+import org.junit.Test;
+
+/**
+ * TestJdbcJarDownloadServlet.
+ *
+ */
+public class TestJdbcJarDownloadServlet {

   @Test
   public void testNoFileFound() throws IOException {
@@ -43,4 +47,4 @@ public void testNoFileFound() throws IOException {
     verify(mockResponse, times(1)).setContentType("application/java-archive");
     verify(mockResponse, times(1)).sendError(HttpServletResponse.SC_NOT_FOUND);
   }
-}
\ No newline at end of file
+}
diff --git service/src/test/org/apache/hive/service/TestCookieSigner.java service/src/test/org/apache/hive/service/TestCookieSigner.java
index b1aa0d84ca..aec6d47000 100644
--- service/src/test/org/apache/hive/service/TestCookieSigner.java
+++ service/src/test/org/apache/hive/service/TestCookieSigner.java
@@ -20,7 +20,7 @@

 import java.util.Random;

-import junit.framework.TestCase;
+
 import org.junit.After;
 import org.junit.Before;
@@ -30,7 +30,7 @@
  * CLIServiceTest.
  *
  */
-public class TestCookieSigner extends TestCase {
+public class TestCookieSigner {
   protected static CookieSigner cs;

   private static final Random RAN = new Random();
diff --git service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
index 8bfa7dc157..e4b0ba05a2 100644
--- service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
+++ service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hive.service.auth;

-import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -25,13 +25,20 @@
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.thrift.TProcessorFactory;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import org.junit.Test;

-public class TestPlainSaslHelper extends TestCase {
+/**
+ * TestPlainSaslHelper.
+ */
+public class TestPlainSaslHelper {

   /**
    * Test setting {@link HiveConf.ConfVars}} config parameter
    * HIVE_SERVER2_ENABLE_DOAS for unsecure mode
    */
+  @Test
   public void testDoAsSetting(){
     HiveConf hconf = new HiveConf();
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java
index 487a5d492d..83748c3eb1 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java
@@ -25,7 +25,7 @@
 import java.util.HashSet;
 import java.util.Set;

-import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -35,7 +35,10 @@
 import org.junit.Assert;
 import org.junit.Test;

-public class TestSessionCleanup extends TestCase {
+/**
+ * TestSessionCleanup.
+ */
+public class TestSessionCleanup {

   @Test
   // This is to test session temporary files are cleaned up after HIVE-11768
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
index 9d00ec4353..4a3803bc95 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
@@ -22,7 +22,7 @@
 import java.util.HashMap;
 import java.util.Map;

-import junit.framework.TestCase;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -38,7 +38,10 @@
 import org.junit.Before;
 import org.junit.Test;

-public class TestSessionGlobalInitFile extends TestCase {
+/**
+ * TestSessionGlobalInitFile.
+ */
+public class TestSessionGlobalInitFile {

   private FakeEmbeddedThriftBinaryCLIService service;
   private ThriftCLIServiceClient client;
@@ -65,7 +68,7 @@ public CLIService getService() {

   @Before
   public void setUp() throws Exception {
-    super.setUp();
+
     // create and put .hiverc sample file to default directory
     initFile = File.createTempFile("test", "hive");
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
index 5cbb0bfa6b..bb0445d364 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
@@ -24,7 +24,7 @@
 import java.util.concurrent.atomic.AtomicInteger;

 import junit.framework.Assert;
-import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -36,7 +36,10 @@
 import org.junit.Before;
 import org.junit.Test;

-public class TestSessionHooks extends TestCase {
+/**
+ * TestSessionHooks.
+ */
+public class TestSessionHooks {

   private static String sessionUserName = "user1";
   private EmbeddedThriftBinaryCLIService service;
@@ -56,10 +59,9 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti
     }
   }

-  @Override
   @Before
   public void setUp() throws Exception {
-    super.setUp();
+
     SessionHookTest.runCount.set(0);
     System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
         TestSessionHooks.SessionHookTest.class.getName());
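
Every hunk in this patch applies the same JUnit 3 to JUnit 4 conversion: drop the
junit.framework.TestCase superclass, replace the setUp()/tearDown() overrides with
@Before/@After-annotated public methods (and drop the super.setUp() calls, since there is
no longer a superclass), mark each test method with @Test, and take assertions from static
imports of org.junit.Assert instead of inheriting them. Below is a minimal self-contained
sketch of the target style; the TestMyFeature class and its StringBuilder fixture are
hypothetical illustrations, not part of the patch:

import static org.junit.Assert.assertEquals;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class TestMyFeature {

  private StringBuilder buffer;

  // @Before replaces the JUnit 3 setUp() override; JUnit 4 runs it before each @Test.
  @Before
  public void setUp() {
    buffer = new StringBuilder("hello");
  }

  // @After replaces the JUnit 3 tearDown() override and runs after each @Test.
  @After
  public void tearDown() {
    buffer = null;
  }

  // @Test replaces JUnit 3's reflective discovery of public methods named test*;
  // the annotated method only needs to be public, void, and take no arguments.
  @Test
  public void testAppend() {
    buffer.append(" world");
    assertEquals("hello world", buffer.toString());
  }
}

Note that the class-level javadoc headers added throughout (for example "TypeInfoUtils
Test.") appear to satisfy a checkstyle javadoc requirement on top-level classes; they are
not needed by JUnit itself.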