diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/LazyHBaseCellMapTest.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/LazyHBaseCellMapTest.java deleted file mode 100644 index eb13e14..0000000 --- hbase-handler/src/test/org/apache/hadoop/hive/hbase/LazyHBaseCellMapTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.hbase; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hive.serde2.lazy.LazyFactory; -import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; -import org.apache.hadoop.io.Text; - -import junit.framework.TestCase; - -public class LazyHBaseCellMapTest extends TestCase { - public static final byte[] TEST_ROW = Bytes.toBytes("test-row"); - public static final byte[] COLUMN_FAMILY = Bytes.toBytes("a"); - public static final String QUAL_PREFIX = "col_"; - - - public void testInitColumnPrefix() throws Exception { - Text nullSequence = new Text("\\N"); - ObjectInspector oi = LazyFactory.createLazyObjectInspector( - TypeInfoUtils.getTypeInfosFromTypeString("map<string,string>").get(0), - new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0); - - LazyHBaseCellMap b = new LazyHBaseCellMap((LazyMapObjectInspector) oi); - - // Initialize a result - Cell[] cells = new KeyValue[2]; - - final String col1="1"; - final String col2="2"; - cells[0] = new KeyValue(TEST_ROW, COLUMN_FAMILY, - Bytes.toBytes(QUAL_PREFIX+col1), Bytes.toBytes("cfacol1")); - cells[1]=new KeyValue(TEST_ROW, COLUMN_FAMILY, - Bytes.toBytes(QUAL_PREFIX+col2), Bytes.toBytes("cfacol2")); - - Result r = Result.create(cells); - - List<Boolean> mapBinaryStorage = new ArrayList<Boolean>(); - mapBinaryStorage.add(false); - mapBinaryStorage.add(false); - - b.init(r, COLUMN_FAMILY, mapBinaryStorage, Bytes.toBytes(QUAL_PREFIX), true); - - assertNotNull(b.getMapValueElement(new Text(col1))); - assertNotNull(b.getMapValueElement(new Text(col2))); - - } -} diff --git hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseCellMap.java hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseCellMap.java new file mode 100644 index 0000000..a15f53c --- /dev/null +++ hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseCellMap.java @@ -0,0 +1,73 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.hbase; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hive.serde2.lazy.LazyFactory; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; +import org.apache.hadoop.io.Text; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.junit.Assert.assertNotNull; + +public class TestLazyHBaseCellMap { + public static final byte[] TEST_ROW = Bytes.toBytes("test-row"); + public static final byte[] COLUMN_FAMILY = Bytes.toBytes("a"); + public static final String QUAL_PREFIX = "col_"; + + @Test + public void testInitColumnPrefix() throws Exception { + Text nullSequence = new Text("\\N"); + ObjectInspector oi = LazyFactory.createLazyObjectInspector( + TypeInfoUtils.getTypeInfosFromTypeString("map<string,string>").get(0), + new byte[]{(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0); + + LazyHBaseCellMap b = new LazyHBaseCellMap((LazyMapObjectInspector) oi); + + // Initialize a result + Cell[] cells = new KeyValue[2]; + + final String col1 = "1"; + final String col2 = "2"; + cells[0] = new KeyValue(TEST_ROW, COLUMN_FAMILY, + Bytes.toBytes(QUAL_PREFIX + col1), Bytes.toBytes("cfacol1")); + cells[1] = new KeyValue(TEST_ROW, COLUMN_FAMILY, + Bytes.toBytes(QUAL_PREFIX + col2), Bytes.toBytes("cfacol2")); + + Result r = Result.create(cells); + + List<Boolean> mapBinaryStorage = new ArrayList<Boolean>(); + mapBinaryStorage.add(false); + mapBinaryStorage.add(false); + + b.init(r, COLUMN_FAMILY, mapBinaryStorage, Bytes.toBytes(QUAL_PREFIX), true); + + assertNotNull(b.getMapValueElement(new Text(col1))); + assertNotNull(b.getMapValueElement(new Text(col2))); + + } +} diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java index f259800..69b70e2 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java @@ -18,15 +18,6 @@ */ package org.apache.hive.hcatalog.cli; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -45,14 +36,23 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.thrift.TException; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import
org.slf4j.LoggerFactory; -public class TestSemanticAnalysis extends HCatBaseTest { +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class TestSemanticAnalysis extends TestHCatBase { private static final Logger LOG = LoggerFactory.getLogger(TestSemanticAnalysis.class); private static final String TBL_NAME = "junit_sem_analysis"; diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java index 4f92b68..59fb904 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java @@ -44,11 +44,11 @@ import org.apache.hive.hcatalog.data.transfer.ReaderContext; import org.apache.hive.hcatalog.data.transfer.WriteEntity; import org.apache.hive.hcatalog.data.transfer.WriterContext; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.junit.Assert; import org.junit.Test; -public class TestReaderWriter extends HCatBaseTest { +public class TestReaderWriter extends TestHCatBase { @Test public void test() throws MetaException, CommandNeedRetryException, diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java deleted file mode 100644 index 6cc0a96..0000000 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.hive.hcatalog.mapreduce; - -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hive.cli.CliSessionState; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.ql.Driver; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.util.Shell; -import org.apache.hive.hcatalog.common.HCatUtil; -import org.apache.pig.ExecType; -import org.apache.pig.PigServer; -import org.apache.pig.backend.executionengine.ExecException; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.Properties; - -/** - * Simplify writing HCatalog tests that require a HiveMetaStore.
- */ -public class HCatBaseTest { - protected static final Logger LOG = LoggerFactory.getLogger(HCatBaseTest.class); - public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("user.dir") + - "/build/test/data/" + HCatBaseTest.class.getCanonicalName() + "-" + System.currentTimeMillis()); - protected static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse"; - - protected HiveConf hiveConf = null; - protected Driver driver = null; - protected HiveMetaStoreClient client = null; - - @BeforeClass - public static void setUpTestDataDir() throws Exception { - LOG.info("Using warehouse directory " + TEST_WAREHOUSE_DIR); - File f = new File(TEST_WAREHOUSE_DIR); - if (f.exists()) { - FileUtil.fullyDelete(f); - } - Assert.assertTrue(new File(TEST_WAREHOUSE_DIR).mkdirs()); - } - - @Before - public void setUp() throws Exception { - if (driver == null) { - setUpHiveConf(); - driver = new Driver(hiveConf); - client = new HiveMetaStoreClient(hiveConf); - SessionState.start(new CliSessionState(hiveConf)); - } - } - - /** - * Create a new HiveConf and set properties necessary for unit tests. - */ - protected void setUpHiveConf() { - hiveConf = new HiveConf(this.getClass()); - hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); - hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); - hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR); - hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); - hiveConf - .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, - "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); - } - - protected void logAndRegister(PigServer server, String query) throws IOException { - logAndRegister(server, query, 1); - } - protected void logAndRegister(PigServer server, String query, int lineNumber) throws IOException { - assert lineNumber > 0 : "(lineNumber > 0) is false"; - LOG.info("Registering pig query: " + query); - server.registerQuery(query, lineNumber); - } - - /** - * creates PigServer in LOCAL mode. - * http://pig.apache.org/docs/r0.12.0/perf.html#error-handling - * @param stopOnFailure equivalent of "-stop_on_failure" command line arg, setting to 'true' makes - * debugging easier - */ - public static PigServer createPigServer(boolean stopOnFailure) throws ExecException { - if(stopOnFailure) { - Properties p = new Properties(); - p.put("stop.on.failure", Boolean.TRUE.toString()); - return new PigServer(ExecType.LOCAL, p); - } - return new PigServer(ExecType.LOCAL); - } -} diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java index ae56ff7..362c003 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java @@ -85,7 +85,7 @@ * native storage formats enumerated using {@link org.apache.hive.hcatalog.mapreduce.StorageFormats}.
*/ @RunWith(Parameterized.class) -public abstract class HCatMapReduceTest extends HCatBaseTest { +public abstract class HCatMapReduceTest extends TestHCatBase { private static final Logger LOG = LoggerFactory.getLogger(HCatMapReduceTest.class); protected static String dbName = Warehouse.DEFAULT_DATABASE_NAME; diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatBase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatBase.java new file mode 100644 index 0000000..f903d40 --- /dev/null +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatBase.java @@ -0,0 +1,114 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hive.hcatalog.mapreduce; + +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hive.hcatalog.common.HCatUtil; +import org.apache.pig.ExecType; +import org.apache.pig.PigServer; +import org.apache.pig.backend.executionengine.ExecException; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.Properties; + +/** + * Simplify writing HCatalog tests that require a HiveMetaStore. + */ +public class TestHCatBase { + protected static final Logger LOG = LoggerFactory.getLogger(TestHCatBase.class); + public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("user.dir") + + "/build/test/data/" + TestHCatBase.class.getCanonicalName() + "-" + System.currentTimeMillis()); + protected static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse"; + + protected HiveConf hiveConf = null; + protected Driver driver = null; + protected HiveMetaStoreClient client = null; + + @BeforeClass + public static void setUpTestDataDir() throws Exception { + LOG.info("Using warehouse directory " + TEST_WAREHOUSE_DIR); + File f = new File(TEST_WAREHOUSE_DIR); + if (f.exists()) { + FileUtil.fullyDelete(f); + } + Assert.assertTrue(new File(TEST_WAREHOUSE_DIR).mkdirs()); + } + + @Before + public void setUp() throws Exception { + if (driver == null) { + setUpHiveConf(); + driver = new Driver(hiveConf); + client = new HiveMetaStoreClient(hiveConf); + SessionState.start(new CliSessionState(hiveConf)); + } + } + + /** + * Create a new HiveConf and set properties necessary for unit tests.
+ */ + protected void setUpHiveConf() { + hiveConf = new HiveConf(this.getClass()); + hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); + hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR); + hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); + hiveConf + .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, + "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); + } + + protected void logAndRegister(PigServer server, String query) throws IOException { + logAndRegister(server, query, 1); + } + protected void logAndRegister(PigServer server, String query, int lineNumber) throws IOException { + assert lineNumber > 0 : "(lineNumber > 0) is false"; + LOG.info("Registering pig query: " + query); + server.registerQuery(query, lineNumber); + } + + /** + * creates PigServer in LOCAL mode. + * http://pig.apache.org/docs/r0.12.0/perf.html#error-handling + * @param stopOnFailure equivalent of "-stop_on_failure" command line arg, setting to 'true' makes + * debugging easier + */ + public static PigServer createPigServer(boolean stopOnFailure) throws ExecException { + if(stopOnFailure) { + Properties p = new Properties(); + p.put("stop.on.failure", Boolean.TRUE.toString()); + return new PigServer(ExecType.LOCAL, p); + } + return new PigServer(ExecType.LOCAL); + } +} diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormat.java index fe02674..b5c2a0a 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormat.java @@ -40,7 +40,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; -public class TestHCatInputFormat extends HCatBaseTest { +public class TestHCatInputFormat extends TestHCatBase { private boolean setUpComplete = false; diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java index edcb558..807cd8b 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java @@ -28,7 +28,7 @@ import org.junit.Before; import org.junit.Test; -public class TestHCatInputFormatMethods extends HCatBaseTest { +public class TestHCatInputFormatMethods extends TestHCatBase { private boolean setUpComplete = false; diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java index bbfd22b..2e25647 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java @@ -24,7 +24,7 @@ import org.junit.Test; -public class TestInputJobInfo extends HCatBaseTest { +public class TestInputJobInfo extends TestHCatBase { @Test public void test4ArgCreate() throws Exception { diff --git
hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java index 59d2efb..aa613e0 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java @@ -52,7 +52,7 @@ import org.apache.hive.hcatalog.common.HCatUtil; import org.apache.hive.hcatalog.data.Pair; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.pig.ExecType; import org.apache.pig.PigRunner; import org.apache.pig.PigServer; @@ -70,7 +70,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public abstract class AbstractHCatLoaderTest extends HCatBaseTest { +public abstract class AbstractHCatLoaderTest extends TestHCatBase { private static final Logger LOG = LoggerFactory.getLogger(AbstractHCatLoaderTest.class); private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data"; private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data"; diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java index 4f7cf2b..ae62a9c 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hive.hcatalog.HcatTestUtils; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.pig.EvalFunc; import org.apache.pig.ExecType; import org.apache.pig.PigException; @@ -53,7 +53,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public abstract class AbstractHCatStorerTest extends HCatBaseTest { +public abstract class AbstractHCatStorerTest extends TestHCatBase { static Logger LOG = LoggerFactory.getLogger(AbstractHCatStorerTest.class); static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data"; String storageFormat; diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java index e15b3e5..b1d62d3 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java @@ -21,7 +21,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hive.hcatalog.HcatTestUtils; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.pig.ExecType; import org.apache.pig.PigServer; import org.apache.pig.backend.executionengine.ExecJob; @@ -42,7 +42,7 @@ * Test that require both HCatLoader and HCatStorer. For read or write only functionality, * please consider @{link TestHCatLoader} or @{link TestHCatStorer}.
*/ -public class TestHCatLoaderStorer extends HCatBaseTest { +public class TestHCatLoaderStorer extends TestHCatBase { private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoaderStorer.class); /** diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java index 2cf14aa..99dbd37 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hive.hcatalog.HcatTestUtils; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.pig.ExecType; import org.apache.pig.PigServer; @@ -42,7 +42,7 @@ * Since {@link HCatStorer} does not allow extra parameters in the constructor, we use {@link HCatStorerWrapper} * that always treats the last parameter as the external path. */ -public class TestHCatStorerWrapper extends HCatBaseTest { +public class TestHCatStorerWrapper extends TestHCatBase { private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data"; diff --git hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java index 3a33403..3e4b1ed0 100644 --- hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java +++ hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestNotificationListener.java @@ -47,7 +47,7 @@ import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.hcatalog.common.HCatConstants; -import org.apache.hive.hcatalog.mapreduce.HCatBaseTest; +import org.apache.hive.hcatalog.mapreduce.TestHCatBase; import org.apache.hive.hcatalog.messaging.AddPartitionMessage; import org.apache.hive.hcatalog.messaging.AlterPartitionMessage; import org.apache.hive.hcatalog.messaging.AlterTableMessage; @@ -65,7 +65,7 @@ import org.junit.Before; import org.junit.Test; -public class TestNotificationListener extends HCatBaseTest implements MessageListener { +public class TestNotificationListener extends TestHCatBase implements MessageListener { private List actualMessages = new Vector(); private static final int MSG_RECEIVED_TIMEOUT = 30; diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java index c5dfa43..8418b11 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveCompatibility.java @@ -34,7 +34,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class TestHCatHiveCompatibility extends HCatBaseTest { +public class TestHCatHiveCompatibility extends TestHCatBase { private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data"; @BeforeClass diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java index 470ff58..f790530 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java @@ -38,7 +38,7 @@ import java.io.ByteArrayOutputStream; import java.util.Iterator; -public class TestHCatHiveThriftCompatibility extends HCatBaseTest { +public class TestHCatHiveThriftCompatibility extends TestHCatBase { private boolean setUpComplete = false; private Path intStringSeq; diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java new file mode 100644 index 0000000..0948efc --- /dev/null +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java @@ -0,0 +1,153 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hive.service.cli.thrift; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hive.service.auth.HiveAuthConstants; +import org.apache.hive.service.server.HiveServer2; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.Statement; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +public class TestThriftCliServiceMessageSize { + protected static int port; + protected static String host = "localhost"; + protected static HiveServer2 hiveServer2; + protected static ThriftCLIServiceClient client; + protected static HiveConf hiveConf; + protected static String USERNAME = "anonymous"; + protected static String PASSWORD = "anonymous"; + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + // Find a free port + port = MetaStoreTestUtils.findFreePort(); + hiveServer2 = new HiveServer2(); + hiveConf = new HiveConf(); + } + + /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + } + + protected static void startHiveServer2WithConf(HiveServer2 hiveServer2, HiveConf hiveConf) + throws Exception { + hiveServer2.init(hiveConf); + // Start HiveServer2 with given config + // Fail if server doesn't start + try { + hiveServer2.start(); + } catch (Throwable t) { + t.printStackTrace(); + fail(); + } + // Wait for startup to complete + Thread.sleep(2000); + System.out.println("HiveServer2 started on port " + port); + } + + protected static void stopHiveServer2(HiveServer2 hiveServer2) throws Exception { + if (hiveServer2 != null) { + hiveServer2.stop(); + } + } + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + + } + + @Test + public void testMessageSize() throws Exception { + String transportMode = "binary"; + + hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host); + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, port); + hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NONE.toString()); + hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); + + HiveServer2 hiveServer2 = new HiveServer2(); + String url = "jdbc:hive2://localhost:" + port + "/default"; + Class.forName("org.apache.hive.jdbc.HiveDriver"); + + try { + // First start HS2 with high message size limit.
This should allow connections + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE, 100*1024*1024); + startHiveServer2WithConf(hiveServer2, hiveConf); + + System.out.println("Started Thrift CLI service with message size limit " + + hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE)); + + // With the high message size limit this connection should work + Connection connection = DriverManager.getConnection(url, "hiveuser", "hive"); + Statement stmt = connection.createStatement(); + assertNotNull("Statement is null", stmt); + stmt.execute("set hive.support.concurrency = false"); + connection.close(); + stopHiveServer2(hiveServer2); + + // Now start HS2 with low message size limit. This should prevent any connections + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE, 1); + hiveServer2 = new HiveServer2(); + startHiveServer2WithConf(hiveServer2, hiveConf); + System.out.println("Started Thrift CLI service with message size limit " + + hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE)); + + Exception caughtException = null; + try { + // This should fail + connection = DriverManager.getConnection(url, "hiveuser", "hive"); + } catch (Exception err) { + caughtException = err; + } + // Verify we hit an error while connecting + assertNotNull(caughtException); + } finally { + stopHiveServer2(hiveServer2); + hiveServer2 = null; + } + } +} diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java deleted file mode 100644 index 926efa3..0000000 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java +++ /dev/null @@ -1,158 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hive.service.cli.thrift; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.Statement; -import java.util.HashMap; -import java.util.Map; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; -import org.apache.hive.service.Service; -import org.apache.hive.service.auth.HiveAuthConstants; -import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes; -import org.apache.hive.service.cli.SessionHandle; -import org.apache.hive.service.server.HiveServer2; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -public class ThriftCliServiceMessageSizeTest { - protected static int port; - protected static String host = "localhost"; - protected static HiveServer2 hiveServer2; - protected static ThriftCLIServiceClient client; - protected static HiveConf hiveConf; - protected static String USERNAME = "anonymous"; - protected static String PASSWORD = "anonymous"; - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - // Find a free port - port = MetaStoreTestUtils.findFreePort(); - hiveServer2 = new HiveServer2(); - hiveConf = new HiveConf(); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - } - - protected static void startHiveServer2WithConf(HiveServer2 hiveServer2, HiveConf hiveConf) - throws Exception { - hiveServer2.init(hiveConf); - // Start HiveServer2 with given config - // Fail if server doesn't start - try { - hiveServer2.start(); - } catch (Throwable t) { - t.printStackTrace(); - fail(); - } - // Wait for startup to complete - Thread.sleep(2000); - System.out.println("HiveServer2 started on port " + port); - } - - protected static void stopHiveServer2(HiveServer2 hiveServer2) throws Exception { - if (hiveServer2 != null) { - hiveServer2.stop(); - } - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - - } - - @Test - public void testMessageSize() throws Exception { - String transportMode = "binary"; - - hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host); - hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, port); - hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NONE.toString()); - hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); - - HiveServer2 hiveServer2 = new HiveServer2(); - String url = "jdbc:hive2://localhost:" + port + "/default"; - Class.forName("org.apache.hive.jdbc.HiveDriver"); - - try { - // First start HS2 with high message size limit.
This should allow connections - hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE, 100*1024*1024); - startHiveServer2WithConf(hiveServer2, hiveConf); - - System.out.println("Started Thrift CLI service with message size limit " - + hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE)); - - // With the high message size limit this connection should work - Connection connection = DriverManager.getConnection(url, "hiveuser", "hive"); - Statement stmt = connection.createStatement(); - assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); - connection.close(); - stopHiveServer2(hiveServer2); - - // Now start HS2 with low message size limit. This should prevent any connections - hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE, 1); - hiveServer2 = new HiveServer2(); - startHiveServer2WithConf(hiveServer2, hiveConf); - System.out.println("Started Thrift CLI service with message size limit " - + hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE)); - - Exception caughtException = null; - try { - // This should fail - connection = DriverManager.getConnection(url, "hiveuser", "hive"); - } catch (Exception err) { - caughtException = err; - } - // Verify we hit an error while connecting - assertNotNull(caughtException); - } finally { - stopHiveServer2(hiveServer2); - hiveServer2 = null; - } - } -} diff --git jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java deleted file mode 100644 index 800172c..0000000 --- jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hive.config; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; - -import org.junit.Test; -import org.junit.Ignore; - -import org.apache.hive.storage.jdbc.conf.DatabaseType; -import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; -import org.apache.hive.storage.jdbc.conf.JdbcStorageConfigManager; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -public class JdbcStorageConfigManagerTest { - - @Test - public void testWithAllRequiredSettingsDefined() throws Exception { - Properties props = new Properties(); - props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString()); - props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); - props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); - props.put(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(), "com.mysql.jdbc.Driver"); - - Map<String, String> jobMap = new HashMap<String, String>(); - JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); - - assertThat(jobMap, is(notNullValue())); - assertThat(jobMap.size(), is(equalTo(4))); - assertThat(jobMap.get(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()), is(equalTo("MYSQL"))); - assertThat(jobMap.get(JdbcStorageConfig.JDBC_URL.getPropertyName()), is(equalTo("jdbc://localhost:3306/hive"))); - assertThat(jobMap.get(JdbcStorageConfig.QUERY.getPropertyName()), - is(equalTo("SELECT col1,col2,col3 FROM sometable"))); - } - - - // since metastore connections don't require the url, this is allowable. - @Ignore @Test(expected = IllegalArgumentException.class) - public void testWithJdbcUrlMissing() throws Exception { - Properties props = new Properties(); - props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString()); - props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); - - Map<String, String> jobMap = new HashMap<String, String>(); - JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); - } - - - @Test(expected = IllegalArgumentException.class) - public void testWithDatabaseTypeMissing() throws Exception { - Properties props = new Properties(); - props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); - props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); - - Map<String, String> jobMap = new HashMap<String, String>(); - JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); - } - - - @Test(expected = IllegalArgumentException.class) - public void testWithUnknownDatabaseType() throws Exception { - Properties props = new Properties(); - props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "Postgres"); - props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); - props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); - - Map<String, String> jobMap = new HashMap<String, String>(); - JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); - } - -} diff --git jdbc-handler/src/test/java/org/apache/hive/config/TestJdbcStorageConfigManager.java jdbc-handler/src/test/java/org/apache/hive/config/TestJdbcStorageConfigManager.java new file mode 100644 index 0000000..3db15b7 --- /dev/null +++ jdbc-handler/src/test/java/org/apache/hive/config/TestJdbcStorageConfigManager.java @@ -0,0 +1,88 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this
file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.config; + +import org.apache.hive.storage.jdbc.conf.DatabaseType; +import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; +import org.apache.hive.storage.jdbc.conf.JdbcStorageConfigManager; +import org.junit.Ignore; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThat; + +public class TestJdbcStorageConfigManager { + + @Test + public void testWithAllRequiredSettingsDefined() throws Exception { + Properties props = new Properties(); + props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString()); + props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); + props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); + props.put(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(), "com.mysql.jdbc.Driver"); + + Map<String, String> jobMap = new HashMap<String, String>(); + JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); + + assertThat(jobMap, is(notNullValue())); + assertThat(jobMap.size(), is(equalTo(4))); + assertThat(jobMap.get(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()), is(equalTo("MYSQL"))); + assertThat(jobMap.get(JdbcStorageConfig.JDBC_URL.getPropertyName()), is(equalTo("jdbc://localhost:3306/hive"))); + assertThat(jobMap.get(JdbcStorageConfig.QUERY.getPropertyName()), + is(equalTo("SELECT col1,col2,col3 FROM sometable"))); + } + + + // since metastore connections don't require the url, this is allowable.
+ @Ignore @Test(expected = IllegalArgumentException.class) + public void testWithJdbcUrlMissing() throws Exception { + Properties props = new Properties(); + props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString()); + props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); + + Map<String, String> jobMap = new HashMap<String, String>(); + JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); + } + + + @Test(expected = IllegalArgumentException.class) + public void testWithDatabaseTypeMissing() throws Exception { + Properties props = new Properties(); + props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); + props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); + + Map<String, String> jobMap = new HashMap<String, String>(); + JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); + } + + + @Test(expected = IllegalArgumentException.class) + public void testWithUnknownDatabaseType() throws Exception { + Properties props = new Properties(); + props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "Postgres"); + props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive"); + props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable"); + + Map<String, String> jobMap = new HashMap<String, String>(); + JdbcStorageConfigManager.copyConfigurationToJob(props, jobMap); + } + +} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/JdbcInputFormatTest.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/JdbcInputFormatTest.java deleted file mode 100644 index cc6acf1..0000000 --- jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/JdbcInputFormatTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hive.storage.jdbc; - -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.when; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapred.InputSplit; -import org.apache.hadoop.mapred.JobConf; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.runners.MockitoJUnitRunner; - -import org.apache.hive.storage.jdbc.dao.DatabaseAccessor; -import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException; - -import java.io.IOException; - -@RunWith(MockitoJUnitRunner.class) -public class JdbcInputFormatTest { - - @Mock - private DatabaseAccessor mockDatabaseAccessor; - - - @Test - public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException, IOException { - JdbcInputFormat f = new JdbcInputFormat(); - when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15); - f.setDbAccessor(mockDatabaseAccessor); - - JobConf conf = new JobConf(); - conf.set("mapred.input.dir", "/temp"); - InputSplit[] splits = f.getSplits(conf, 3); - - assertThat(splits, is(notNullValue())); - assertThat(splits.length, is(3)); - - assertThat(splits[0].getLength(), is(5L)); - } - - - @Test - public void testSplitLogic_withSpillOver() throws HiveJdbcDatabaseAccessException, IOException { - JdbcInputFormat f = new JdbcInputFormat(); - when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15); - f.setDbAccessor(mockDatabaseAccessor); - - JobConf conf = new JobConf(); - conf.set("mapred.input.dir", "/temp"); - InputSplit[] splits = f.getSplits(conf, 6); - - assertThat(splits, is(notNullValue())); - assertThat(splits.length, is(6)); - - for (int i = 0; i < 3; i++) { - assertThat(splits[i].getLength(), is(3L)); - } - - for (int i = 3; i < 6; i++) { - assertThat(splits[i].getLength(), is(2L)); - } - } -} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/QueryConditionBuilderTest.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/QueryConditionBuilderTest.java deleted file mode 100644 index 5cdae47..0000000 --- jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/QueryConditionBuilderTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hive.storage.jdbc; - -import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.ql.plan.TableScanDesc; -import org.apache.hadoop.hive.serde.serdeConstants; -import org.junit.BeforeClass; -import org.junit.Test; - -import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; - -import java.io.IOException; -import java.util.Scanner; - -public class QueryConditionBuilderTest { - - private static String condition1; - private static String condition2; - - - @BeforeClass - public static void setup() throws IOException { - condition1 = readFileContents("condition1.xml"); - condition2 = readFileContents("condition2.xml"); - } - - - private static String readFileContents(String name) throws IOException { - try (Scanner s = new Scanner(QueryConditionBuilderTest.class.getClassLoader().getResourceAsStream(name))) { - return s.useDelimiter("\\Z").next(); - } - } - - - @Test - public void testSimpleCondition_noTranslation() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("(visitor_id = 'x')"))); - } - - - @Test - public void testSimpleCondition_withTranslation() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); - conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), - "visitor_id=vid, sentiment=sentiment, tracking_id=tracking_id"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("(vid = 'x')"))); - } - - - @Test - public void testSimpleCondition_withDateType() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); - conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), - "visitor_id=vid:date, sentiment=sentiment, tracking_id=tracking_id"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("({d vid} = 'x')"))); - } - - - @Test - public void testSimpleCondition_withVariedCaseMappings() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_ID,sentiment,tracking_id"); - conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), - "visitor_id=VID:date, sentiment=sentiment, tracking_id=tracking_id"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("({d vid} = 'x')"))); - } - - - @Test - public void testMultipleConditions_noTranslation() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition2); - conf.set(serdeConstants.LIST_COLUMNS,
"visitor_id,sentiment,tracking_id"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("((visitor_id = 'x') and (sentiment = 'y'))"))); - } - - - @Test - public void testMultipleConditions_withTranslation() { - Configuration conf = new Configuration(); - conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition2); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); - conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), "visitor_id=v,sentiment=s,tracking_id=t"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition, is(equalToIgnoringWhiteSpace("((v = 'x') and (s = 'y'))"))); - } - - - @Test - public void testWithNullConf() { - String condition = QueryConditionBuilder.getInstance().buildCondition(null); - assertThat(condition, is(notNullValue())); - assertThat(condition.trim().isEmpty(), is(true)); - } - - - @Test - public void testWithUndefinedFilterExpr() { - Configuration conf = new Configuration(); - conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); - conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), "visitor_id=v,sentiment=s,tracking_id=t"); - String condition = QueryConditionBuilder.getInstance().buildCondition(conf); - - assertThat(condition, is(notNullValue())); - assertThat(condition.trim().isEmpty(), is(true)); - } - -} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java new file mode 100644 index 0000000..e904774 --- /dev/null +++ jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java @@ -0,0 +1,80 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hive.storage.jdbc; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.mapred.InputSplit; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hive.storage.jdbc.dao.DatabaseAccessor; +import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import java.io.IOException; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThat; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class TestJdbcInputFormat { + + @Mock + private DatabaseAccessor mockDatabaseAccessor; + + + @Test + public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException, IOException { + JdbcInputFormat f = new JdbcInputFormat(); + when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15); + f.setDbAccessor(mockDatabaseAccessor); + + JobConf conf = new JobConf(); + conf.set("mapred.input.dir", "/temp"); + InputSplit[] splits = f.getSplits(conf, 3); + + assertThat(splits, is(notNullValue())); + assertThat(splits.length, is(3)); + + assertThat(splits[0].getLength(), is(5L)); + } + + + @Test + public void testSplitLogic_withSpillOver() throws HiveJdbcDatabaseAccessException, IOException { + JdbcInputFormat f = new JdbcInputFormat(); + when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15); + f.setDbAccessor(mockDatabaseAccessor); + + JobConf conf = new JobConf(); + conf.set("mapred.input.dir", "/temp"); + InputSplit[] splits = f.getSplits(conf, 6); + + assertThat(splits, is(notNullValue())); + assertThat(splits.length, is(6)); + + for (int i = 0; i < 3; i++) { + assertThat(splits[i].getLength(), is(3L)); + } + + for (int i = 3; i < 6; i++) { + assertThat(splits[i].getLength(), is(2L)); + } + } +} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestQueryConditionBuilder.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestQueryConditionBuilder.java new file mode 100644 index 0000000..a59645d --- /dev/null +++ jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestQueryConditionBuilder.java @@ -0,0 +1,150 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hive.storage.jdbc; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.plan.TableScanDesc; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.IOException; +import java.util.Scanner; + +import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThat; + +public class TestQueryConditionBuilder { + + private static String condition1; + private static String condition2; + + + @BeforeClass + public static void setup() throws IOException { + condition1 = readFileContents("condition1.xml"); + condition2 = readFileContents("condition2.xml"); + } + + + private static String readFileContents(String name) throws IOException { + try (Scanner s = new Scanner(TestQueryConditionBuilder.class.getClassLoader().getResourceAsStream(name))) { + return s.useDelimiter("\\Z").next(); + } + } + + + @Test + public void testSimpleCondition_noTranslation() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("(visitor_id = 'x')"))); + } + + + @Test + public void testSimpleCondition_withTranslation() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); + conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), + "visitor_id=vid, sentiment=sentiment, tracking_id=tracking_id"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("(vid = 'x')"))); + } + + + @Test + public void testSimpleCondition_withDateType() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); + conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), + "visitor_id=vid:date, sentiment=sentiment, tracking_id=tracking_id"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("({d vid} = 'x')"))); + } + + + @Test + public void testSimpleCondition_withVariedCaseMappings() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition1); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_ID,sentiment,tracking_id"); + conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), + "visitor_id=VID:date, sentiment=sentiment, tracking_id=tracking_id"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("({d vid} = 'x')"))); + } + + + @Test + public void testMultipleConditions_noTranslation() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition2); + conf.set(serdeConstants.LIST_COLUMNS, 
"visitor_id,sentiment,tracking_id"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("((visitor_id = 'x') and (sentiment = 'y'))"))); + } + + + @Test + public void testMultipleConditions_withTranslation() { + Configuration conf = new Configuration(); + conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, condition2); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); + conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), "visitor_id=v,sentiment=s,tracking_id=t"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition, is(equalToIgnoringWhiteSpace("((v = 'x') and (s = 'y'))"))); + } + + + @Test + public void testWithNullConf() { + String condition = QueryConditionBuilder.getInstance().buildCondition(null); + assertThat(condition, is(notNullValue())); + assertThat(condition.trim().isEmpty(), is(true)); + } + + + @Test + public void testWithUndefinedFilterExpr() { + Configuration conf = new Configuration(); + conf.set(serdeConstants.LIST_COLUMNS, "visitor_id,sentiment,tracking_id"); + conf.set(JdbcStorageConfig.COLUMN_MAPPING.getPropertyName(), "visitor_id=v,sentiment=s,tracking_id=t"); + String condition = QueryConditionBuilder.getInstance().buildCondition(conf); + + assertThat(condition, is(notNullValue())); + assertThat(condition.trim().isEmpty(), is(true)); + } + +} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestSuite.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestSuite.java deleted file mode 100644 index df8eab7..0000000 --- jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestSuite.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hive.storage.jdbc; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; - -import org.apache.hive.config.JdbcStorageConfigManagerTest; -import org.apache.hive.storage.jdbc.QueryConditionBuilderTest; -import org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessorTest; - -@RunWith(Suite.class) -@SuiteClasses({ JdbcStorageConfigManagerTest.class, GenericJdbcDatabaseAccessorTest.class, - QueryConditionBuilderTest.class }) -public class TestSuite { -} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java deleted file mode 100644 index b2442e8a..0000000 --- jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hive.storage.jdbc.dao; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.equalToIgnoringCase; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.junit.Assert.assertThat; - -import org.apache.hadoop.conf.Configuration; -import org.junit.Test; - -import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; -import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException; - -import java.util.List; -import java.util.Map; - -public class GenericJdbcDatabaseAccessorTest { - - @Test - public void testGetColumnNames_starQuery() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - List columnNames = accessor.getColumnNames(conf); - - assertThat(columnNames, is(notNullValue())); - assertThat(columnNames.size(), is(equalTo(7))); - assertThat(columnNames.get(0), is(equalToIgnoringCase("strategy_id"))); - } - - - @Test - public void testGetColumnNames_fieldListQuery() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select name,referrer from test_strategy"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - List columnNames = accessor.getColumnNames(conf); - - assertThat(columnNames, is(notNullValue())); - assertThat(columnNames.size(), is(equalTo(2))); - assertThat(columnNames.get(0), is(equalToIgnoringCase("name"))); - } - - - @Test(expected = HiveJdbcDatabaseAccessException.class) - public void testGetColumnNames_invalidQuery() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from invalid_strategy"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - @SuppressWarnings("unused") - List columnNames = accessor.getColumnNames(conf); - } - - - @Test - public void testGetTotalNumberOfRecords() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - int numRecords = accessor.getTotalNumberOfRecords(conf); - - assertThat(numRecords, is(equalTo(5))); - } - - - @Test - public void testGetTotalNumberOfRecords_whereClause() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '5'"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - int numRecords = accessor.getTotalNumberOfRecords(conf); - - assertThat(numRecords, is(equalTo(1))); - } - - - @Test - public void testGetTotalNumberOfRecords_noRecords() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'"); - DatabaseAccessor accessor = 
DatabaseAccessorFactory.getAccessor(conf); - int numRecords = accessor.getTotalNumberOfRecords(conf); - - assertThat(numRecords, is(equalTo(0))); - } - - - @Test(expected = HiveJdbcDatabaseAccessException.class) - public void testGetTotalNumberOfRecords_invalidQuery() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx where strategy_id = '5'"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - @SuppressWarnings("unused") - int numRecords = accessor.getTotalNumberOfRecords(conf); - } - - - @Test - public void testGetRecordIterator() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 0); - - assertThat(iterator, is(notNullValue())); - - int count = 0; - while (iterator.hasNext()) { - Map record = iterator.next(); - count++; - - assertThat(record, is(notNullValue())); - assertThat(record.size(), is(equalTo(7))); - assertThat(record.get("STRATEGY_ID"), is(equalTo(count))); - } - - assertThat(count, is(equalTo(2))); - iterator.close(); - } - - - @Test - public void testGetRecordIterator_offsets() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 2); - - assertThat(iterator, is(notNullValue())); - - int count = 0; - while (iterator.hasNext()) { - Map record = iterator.next(); - count++; - - assertThat(record, is(notNullValue())); - assertThat(record.size(), is(equalTo(7))); - assertThat(record.get("STRATEGY_ID"), is(equalTo(count + 2))); - } - - assertThat(count, is(equalTo(2))); - iterator.close(); - } - - - @Test - public void testGetRecordIterator_emptyResultSet() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2); - - assertThat(iterator, is(notNullValue())); - assertThat(iterator.hasNext(), is(false)); - iterator.close(); - } - - - @Test - public void testGetRecordIterator_largeOffset() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 10, 25); - - assertThat(iterator, is(notNullValue())); - assertThat(iterator.hasNext(), is(false)); - iterator.close(); - } - - - @Test(expected = HiveJdbcDatabaseAccessException.class) - public void testGetRecordIterator_invalidQuery() throws HiveJdbcDatabaseAccessException { - Configuration conf = buildConfiguration(); - conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx"); - DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); - @SuppressWarnings("unused") - JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2); - } - - - private Configuration buildConfiguration() { - String scriptPath = - GenericJdbcDatabaseAccessorTest.class.getClassLoader().getResource("test_script.sql") - .getPath(); - Configuration config = new Configuration(); - 
config.set(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "H2"); - config.set(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(), "org.h2.Driver"); - config.set(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc:h2:mem:test;MODE=MySQL;INIT=runscript from '" - + scriptPath + "'"); - config.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy"); - - return config; - } - -} diff --git jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java new file mode 100644 index 0000000..34f061e --- /dev/null +++ jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java @@ -0,0 +1,205 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.storage.jdbc.dao; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig; +import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException; +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.equalToIgnoringCase; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThat; + +public class TestGenericJdbcDatabaseAccessor { + + @Test + public void testGetColumnNames_starQuery() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + List columnNames = accessor.getColumnNames(conf); + + assertThat(columnNames, is(notNullValue())); + assertThat(columnNames.size(), is(equalTo(7))); + assertThat(columnNames.get(0), is(equalToIgnoringCase("strategy_id"))); + } + + + @Test + public void testGetColumnNames_fieldListQuery() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select name,referrer from test_strategy"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + List columnNames = accessor.getColumnNames(conf); + + assertThat(columnNames, is(notNullValue())); + assertThat(columnNames.size(), is(equalTo(2))); + assertThat(columnNames.get(0), is(equalToIgnoringCase("name"))); + } + + + @Test(expected = HiveJdbcDatabaseAccessException.class) + public void testGetColumnNames_invalidQuery() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from invalid_strategy"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + @SuppressWarnings("unused") + List columnNames = accessor.getColumnNames(conf); + } + + + @Test + public void testGetTotalNumberOfRecords() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + DatabaseAccessor 
accessor = DatabaseAccessorFactory.getAccessor(conf); + int numRecords = accessor.getTotalNumberOfRecords(conf); + + assertThat(numRecords, is(equalTo(5))); + } + + + @Test + public void testGetTotalNumberOfRecords_whereClause() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '5'"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + int numRecords = accessor.getTotalNumberOfRecords(conf); + + assertThat(numRecords, is(equalTo(1))); + } + + + @Test + public void testGetTotalNumberOfRecords_noRecords() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + int numRecords = accessor.getTotalNumberOfRecords(conf); + + assertThat(numRecords, is(equalTo(0))); + } + + + @Test(expected = HiveJdbcDatabaseAccessException.class) + public void testGetTotalNumberOfRecords_invalidQuery() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx where strategy_id = '5'"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + @SuppressWarnings("unused") + int numRecords = accessor.getTotalNumberOfRecords(conf); + } + + + @Test + public void testGetRecordIterator() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 0); + + assertThat(iterator, is(notNullValue())); + + int count = 0; + while (iterator.hasNext()) { + Map record = iterator.next(); + count++; + + assertThat(record, is(notNullValue())); + assertThat(record.size(), is(equalTo(7))); + assertThat(record.get("STRATEGY_ID"), is(equalTo(count))); + } + + assertThat(count, is(equalTo(2))); + iterator.close(); + } + + + @Test + public void testGetRecordIterator_offsets() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 2); + + assertThat(iterator, is(notNullValue())); + + int count = 0; + while (iterator.hasNext()) { + Map record = iterator.next(); + count++; + + assertThat(record, is(notNullValue())); + assertThat(record.size(), is(equalTo(7))); + assertThat(record.get("STRATEGY_ID"), is(equalTo(count + 2))); + } + + assertThat(count, is(equalTo(2))); + iterator.close(); + } + + + @Test + public void testGetRecordIterator_emptyResultSet() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2); + + assertThat(iterator, is(notNullValue())); + assertThat(iterator.hasNext(), is(false)); + iterator.close(); + } + + + @Test + public void testGetRecordIterator_largeOffset() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); 
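+    // Assumption: test_script.sql seeds test_strategy with exactly 5 rows (testGetTotalNumberOfRecords
+    // above expects 5), so an offset of 25 is past the end of the result set and the iterator
+    // is expected to come back empty.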
+ JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 10, 25); + + assertThat(iterator, is(notNullValue())); + assertThat(iterator.hasNext(), is(false)); + iterator.close(); + } + + + @Test(expected = HiveJdbcDatabaseAccessException.class) + public void testGetRecordIterator_invalidQuery() throws HiveJdbcDatabaseAccessException { + Configuration conf = buildConfiguration(); + conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx"); + DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); + @SuppressWarnings("unused") + JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2); + } + + + private Configuration buildConfiguration() { + String scriptPath = + TestGenericJdbcDatabaseAccessor.class.getClassLoader().getResource("test_script.sql") + .getPath(); + Configuration config = new Configuration(); + config.set(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "H2"); + config.set(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(), "org.h2.Driver"); + config.set(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc:h2:mem:test;MODE=MySQL;INIT=runscript from '" + + scriptPath + "'"); + config.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy"); + + return config; + } + +} diff --git jdbc/src/test/org/apache/hive/jdbc/HiveStatementTest.java jdbc/src/test/org/apache/hive/jdbc/HiveStatementTest.java deleted file mode 100644 index b06703d..0000000 --- jdbc/src/test/org/apache/hive/jdbc/HiveStatementTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hive.jdbc; - -import org.junit.Test; - -import java.sql.SQLException; - -import static org.junit.Assert.assertEquals; - -public class HiveStatementTest { - - @Test - public void testSetFetchSize1() throws SQLException { - HiveStatement stmt = new HiveStatement(null, null, null); - stmt.setFetchSize(123); - assertEquals(123, stmt.getFetchSize()); - } - - @Test - public void testSetFetchSize2() throws SQLException { - HiveStatement stmt = new HiveStatement(null, null, null); - int initial = stmt.getFetchSize(); - stmt.setFetchSize(0); - assertEquals(initial, stmt.getFetchSize()); - } - - @Test(expected = SQLException.class) - public void testSetFetchSize3() throws SQLException { - HiveStatement stmt = new HiveStatement(null, null, null); - stmt.setFetchSize(-1); - } - - @Test - public void testaddBatch() throws SQLException { - HiveStatement stmt = new HiveStatement(null, null, null); - try { - stmt.addBatch(null); - } catch (SQLException e) { - assertEquals("java.sql.SQLFeatureNotSupportedException: Method not supported", e.toString()); - } - } -} diff --git jdbc/src/test/org/apache/hive/jdbc/TestHiveStatement.java jdbc/src/test/org/apache/hive/jdbc/TestHiveStatement.java new file mode 100644 index 0000000..eeb6b5d --- /dev/null +++ jdbc/src/test/org/apache/hive/jdbc/TestHiveStatement.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hive.jdbc;
+
+import org.junit.Test;
+
+import java.sql.SQLException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestHiveStatement {
+
+  @Test
+  public void testSetFetchSize1() throws SQLException {
+    HiveStatement stmt = new HiveStatement(null, null, null);
+    stmt.setFetchSize(123);
+    assertEquals(123, stmt.getFetchSize());
+  }
+
+  @Test
+  public void testSetFetchSize2() throws SQLException {
+    HiveStatement stmt = new HiveStatement(null, null, null);
+    int initial = stmt.getFetchSize();
+    stmt.setFetchSize(0);
+    assertEquals(initial, stmt.getFetchSize());
+  }
+
+  @Test(expected = SQLException.class)
+  public void testSetFetchSize3() throws SQLException {
+    HiveStatement stmt = new HiveStatement(null, null, null);
+    stmt.setFetchSize(-1);
+  }
+
+  @Test
+  public void testAddBatch() throws SQLException {
+    HiveStatement stmt = new HiveStatement(null, null, null);
+    try {
+      stmt.addBatch(null);
+      fail("addBatch is unsupported and should have thrown an SQLException");
+    } catch (SQLException e) {
+      assertEquals("java.sql.SQLFeatureNotSupportedException: Method not supported", e.toString());
+    }
+  }
+}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
deleted file mode 100644
index c278338..0000000
--- metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.apache.hadoop.hive.metastore.messaging.json; - -import org.codehaus.jackson.annotate.JsonProperty; -import org.json.JSONException; -import org.junit.Test; -import org.skyscreamer.jsonassert.JSONAssert; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.*; - -public class JSONMessageDeserializerTest { - - public static class MyClass { - @JsonProperty - private int a; - @JsonProperty - private Map map; - private long l; - private String shouldNotSerialize = "shouldNotSerialize"; - - //for jackson to instantiate - MyClass() { - } - - MyClass(int a, Map map, long l) { - this.a = a; - this.map = map; - this.l = l; - } - - @JsonProperty - long getL() { - return l; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - MyClass myClass = (MyClass) o; - - if (a != myClass.a) - return false; - if (l != myClass.l) - return false; - if (!map.equals(myClass.map)) - return false; - return shouldNotSerialize.equals(myClass.shouldNotSerialize); - } - - @Override - public int hashCode() { - int result = a; - result = 31 * result + map.hashCode(); - result = 31 * result + (int) (l ^ (l >>> 32)); - result = 31 * result + shouldNotSerialize.hashCode(); - return result; - } - } - - @Test - public void shouldNotSerializePropertiesNotAnnotated() throws IOException, JSONException { - MyClass obj = new MyClass(Integer.MAX_VALUE, new HashMap() {{ - put("a", "a"); - put("b", "b"); - }}, Long.MAX_VALUE); - String json = JSONMessageDeserializer.mapper.writeValueAsString(obj); - JSONAssert.assertEquals( - "{\"a\":2147483647,\"map\":{\"b\":\"b\",\"a\":\"a\"},\"l\":9223372036854775807}", json, - false); - } - - @Test - public void shouldDeserializeJsonStringToObject() throws IOException { - String json = "{\"a\":47,\"map\":{\"a\":\"a\",\"b\":\"a value for b\"},\"l\":98}"; - MyClass actual = JSONMessageDeserializer.mapper.readValue(json, MyClass.class); - MyClass expected = new MyClass(47, new HashMap() {{ - put("a", "a"); - put("b", "a value for b"); - }}, 98L); - assertEquals(expected, actual); - } -} \ No newline at end of file diff --git metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/TestJSONMessageDeserializer.java metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/TestJSONMessageDeserializer.java new file mode 100644 index 0000000..9e22d8f --- /dev/null +++ metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/TestJSONMessageDeserializer.java @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.metastore.messaging.json; + +import org.codehaus.jackson.annotate.JsonProperty; +import org.json.JSONException; +import org.junit.Test; +import org.skyscreamer.jsonassert.JSONAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class TestJSONMessageDeserializer { + + public static class MyClass { + @JsonProperty + private int a; + @JsonProperty + private Map map; + private long l; + private String shouldNotSerialize = "shouldNotSerialize"; + + //for jackson to instantiate + MyClass() { + } + + MyClass(int a, Map map, long l) { + this.a = a; + this.map = map; + this.l = l; + } + + @JsonProperty + long getL() { + return l; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + MyClass myClass = (MyClass) o; + + if (a != myClass.a) + return false; + if (l != myClass.l) + return false; + if (!map.equals(myClass.map)) + return false; + return shouldNotSerialize.equals(myClass.shouldNotSerialize); + } + + @Override + public int hashCode() { + int result = a; + result = 31 * result + map.hashCode(); + result = 31 * result + (int) (l ^ (l >>> 32)); + result = 31 * result + shouldNotSerialize.hashCode(); + return result; + } + } + + @Test + public void shouldNotSerializePropertiesNotAnnotated() throws IOException, JSONException { + MyClass obj = new MyClass(Integer.MAX_VALUE, new HashMap() {{ + put("a", "a"); + put("b", "b"); + }}, Long.MAX_VALUE); + String json = JSONMessageDeserializer.mapper.writeValueAsString(obj); + JSONAssert.assertEquals( + "{\"a\":2147483647,\"map\":{\"b\":\"b\",\"a\":\"a\"},\"l\":9223372036854775807}", json, + false); + } + + @Test + public void shouldDeserializeJsonStringToObject() throws IOException { + String json = "{\"a\":47,\"map\":{\"a\":\"a\",\"b\":\"a value for b\"},\"l\":98}"; + MyClass actual = JSONMessageDeserializer.mapper.readValue(json, MyClass.class); + MyClass expected = new MyClass(47, new HashMap() {{ + put("a", "a"); + put("b", "a value for b"); + }}, 98L); + assertEquals(expected, actual); + } +} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TaskTrackerTest.java ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TaskTrackerTest.java deleted file mode 100644 index ed0ebef..0000000 --- ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TaskTrackerTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.ql.exec.repl.bootstrap.load; - -import org.apache.hadoop.hive.ql.exec.Task; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.powermock.modules.junit4.PowerMockRunner; - -import java.io.Serializable; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -@RunWith(PowerMockRunner.class) - public class TaskTrackerTest { - @Mock - private Task task; - - @Test - public void taskTrackerCompositionInitializesTheMaxTasksCorrectly() { - TaskTracker taskTracker = new TaskTracker(1); - assertTrue(taskTracker.canAddMoreTasks()); - taskTracker.addTask(task); - assertFalse(taskTracker.canAddMoreTasks()); - - TaskTracker taskTracker2 = new TaskTracker(taskTracker); - assertFalse(taskTracker2.canAddMoreTasks()); - } -} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java new file mode 100644 index 0000000..ac2db80 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.exec.repl.bootstrap.load; + +import org.apache.hadoop.hive.ql.exec.Task; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.io.Serializable; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@RunWith(PowerMockRunner.class) + public class TestTaskTracker { + @Mock + private Task task; + + @Test + public void taskTrackerCompositionInitializesTheMaxTasksCorrectly() { + TaskTracker taskTracker = new TaskTracker(1); + assertTrue(taskTracker.canAddMoreTasks()); + taskTracker.addTask(task); + assertFalse(taskTracker.canAddMoreTasks()); + + TaskTracker taskTracker2 = new TaskTracker(taskTracker); + assertFalse(taskTracker2.canAddMoreTasks()); + } +} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/tez/InputSplitComparatorTest.java ql/src/test/org/apache/hadoop/hive/ql/exec/tez/InputSplitComparatorTest.java deleted file mode 100644 index add65ae..0000000 --- ql/src/test/org/apache/hadoop/hive/ql/exec/tez/InputSplitComparatorTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.tez; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapred.FileSplit; -import org.junit.Test; - -import static org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.InputSplitComparator; -import static org.junit.Assert.assertEquals; - -public class InputSplitComparatorTest { - - private static final String[] EMPTY = new String[]{}; - - @Test - public void testCompare1() throws Exception { - FileSplit split1 = new FileSplit(new Path("/abc/def"), 2000L, 500L, EMPTY); - FileSplit split2 = new FileSplit(new Path("/abc/def"), 1000L, 500L, EMPTY); - InputSplitComparator comparator = new InputSplitComparator(); - assertEquals(1, comparator.compare(split1, split2)); - } -} diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestInputSplitComparator.java ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestInputSplitComparator.java new file mode 100644 index 0000000..f1c1885 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestInputSplitComparator.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.tez; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.mapred.FileSplit; +import org.junit.Test; + +import static org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.InputSplitComparator; +import static org.junit.Assert.assertEquals; + +public class TestInputSplitComparator { + + private static final String[] EMPTY = new String[]{}; + + @Test + public void testCompare1() throws Exception { + FileSplit split1 = new FileSplit(new Path("/abc/def"), 2000L, 500L, EMPTY); + FileSplit split2 = new FileSplit(new Path("/abc/def"), 1000L, 500L, EMPTY); + InputSplitComparator comparator = new InputSplitComparator(); + assertEquals(1, comparator.compare(split1, split2)); + } +} diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/CopyUtilsTest.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/CopyUtilsTest.java deleted file mode 100644 index e643d8f..0000000 --- ql/src/test/org/apache/hadoop/hive/ql/parse/repl/CopyUtilsTest.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.parse.repl; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.junit.Test; - -import static org.junit.Assert.assertFalse; - -public class CopyUtilsTest { - /* - Distcp currently does not copy a single file in a distributed manner hence we dont care about - the size of file, if there is only file, we dont want to launch distcp. - */ - @Test - public void distcpShouldNotBeCalledOnlyForOneFile() { - HiveConf conf = new HiveConf(); - conf.setLongVar(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE, 1); - CopyUtils copyUtils = new CopyUtils("", conf); - long MB_128 = 128 * 1024 * 1024; - assertFalse(copyUtils.limitReachedForLocalCopy(MB_128, 1L)); - } - - @Test - public void distcpShouldNotBeCalledForSmallerFileSize() { - HiveConf conf = new HiveConf(); - CopyUtils copyUtils = new CopyUtils("", conf); - long MB_16 = 16 * 1024 * 1024; - assertFalse(copyUtils.limitReachedForLocalCopy(MB_16, 100L)); - } -} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java new file mode 100644 index 0000000..a203afc --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/parse/repl/TestCopyUtils.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse.repl;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+
+public class TestCopyUtils {
+  /*
+    Distcp currently does not copy a single file in a distributed manner; hence we don't care
+    about the size of the file. If there is only one file, we don't want to launch distcp.
+  */
+  @Test
+  public void distcpShouldNotBeCalledOnlyForOneFile() {
+    HiveConf conf = new HiveConf();
+    conf.setLongVar(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE, 1);
+    CopyUtils copyUtils = new CopyUtils("", conf);
+    long MB_128 = 128 * 1024 * 1024;
+    assertFalse(copyUtils.limitReachedForLocalCopy(MB_128, 1L));
+  }
+
+  @Test
+  public void distcpShouldNotBeCalledForSmallerFileSize() {
+    HiveConf conf = new HiveConf();
+    CopyUtils copyUtils = new CopyUtils("", conf);
+    long MB_16 = 16 * 1024 * 1024;
+    assertFalse(copyUtils.limitReachedForLocalCopy(MB_16, 100L));
+  }
+}
\ No newline at end of file
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapperTest.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapperTest.java
deleted file mode 100644
index fab9327..0000000
--- ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapperTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse.repl.dump;
-
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InOrder;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
-
-@RunWith(MockitoJUnitRunner.class)
-public class HiveWrapperTest {
-  @Mock
-  private HiveWrapper.Tuple.Function<ReplicationSpec> specFunction;
-  @Mock
-  private HiveWrapper.Tuple.Function<Table> tableFunction;
-
-  @Test
-  public void replicationIdIsRequestedBeforeObjectDefinition() throws HiveException {
-    new HiveWrapper.Tuple<>(specFunction, tableFunction);
-    InOrder inOrder = Mockito.inOrder(specFunction, tableFunction);
-    inOrder.verify(specFunction).fromMetaStore();
-    inOrder.verify(tableFunction).fromMetaStore();
-  }
-}
\ No newline at end of file
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestHiveWrapper.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestHiveWrapper.java
new file mode 100644
index 0000000..5a1e60b
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/TestHiveWrapper.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse.repl.dump;
+
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.runners.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class TestHiveWrapper {
+  @Mock
+  private HiveWrapper.Tuple.Function<ReplicationSpec> specFunction;
+  @Mock
+  private HiveWrapper.Tuple.Function<Table>
tableFunction; + + @Test + public void replicationIdIsRequestedBeforeObjectDefinition() throws HiveException { + new HiveWrapper.Tuple<>(specFunction, tableFunction); + InOrder inOrder = Mockito.inOrder(specFunction, tableFunction); + inOrder.verify(specFunction).fromMetaStore(); + inOrder.verify(tableFunction).fromMetaStore(); + } +} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/PrimaryToReplicaResourceFunctionTest.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/PrimaryToReplicaResourceFunctionTest.java deleted file mode 100644 index 1859dba..0000000 --- ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/PrimaryToReplicaResourceFunctionTest.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.parse.repl.load.message; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.Function; -import org.apache.hadoop.hive.metastore.api.ResourceType; -import org.apache.hadoop.hive.metastore.api.ResourceUri; -import org.apache.hadoop.hive.ql.exec.ReplCopyTask; -import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.parse.repl.load.MetaData; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.mockito.Mock; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; - -import static org.apache.hadoop.hive.ql.parse.repl.load.message.CreateFunctionHandler.PrimaryToReplicaResourceFunction; -import static org.apache.hadoop.hive.ql.parse.repl.load.message.MessageHandler.Context; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; -import static org.powermock.api.mockito.PowerMockito.mockStatic; -import static org.powermock.api.mockito.PowerMockito.when; -import static org.mockito.Matchers.any; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ PrimaryToReplicaResourceFunction.class, FileSystem.class, ReplCopyTask.class, - System.class }) -public class PrimaryToReplicaResourceFunctionTest { - - private PrimaryToReplicaResourceFunction function; - @Mock - private HiveConf hiveConf; - @Mock - - private Function functionObj; - @Mock - 
private FileSystem mockFs; - private static Logger logger = - LoggerFactory.getLogger(PrimaryToReplicaResourceFunctionTest.class); - - @Before - public void setup() { - MetaData metadata = new MetaData(null, null, null, null, functionObj); - Context context = - new Context("primaryDb", null, null, null, null, hiveConf, null, null, logger); - when(hiveConf.getVar(HiveConf.ConfVars.REPL_FUNCTIONS_ROOT_DIR)) - .thenReturn("/someBasePath/withADir/"); - function = new PrimaryToReplicaResourceFunction(context, metadata, "replicaDbName"); - } - - @Test - public void createDestinationPath() throws IOException, SemanticException, URISyntaxException { - mockStatic(FileSystem.class); - when(FileSystem.get(any(Configuration.class))).thenReturn(mockFs); - when(mockFs.getScheme()).thenReturn("hdfs"); - when(mockFs.getUri()).thenReturn(new URI("hdfs", "somehost:9000", null, null, null)); - mockStatic(System.class); - when(System.currentTimeMillis()).thenReturn(Long.MAX_VALUE); - when(functionObj.getFunctionName()).thenReturn("someFunctionName"); - mockStatic(ReplCopyTask.class); - Task mock = mock(Task.class); - when(ReplCopyTask.getLoadCopyTask(any(ReplicationSpec.class), any(Path.class), any(Path.class), - any(HiveConf.class))).thenReturn(mock); - - ResourceUri resourceUri = function.destinationResourceUri(new ResourceUri(ResourceType.JAR, - "hdfs://localhost:9000/user/someplace/ab.jar#e094828883")); - - assertThat(resourceUri.getUri(), - is(equalTo( - "hdfs://somehost:9000/someBasePath/withADir/replicaDbName/somefunctionname/" + String - .valueOf(Long.MAX_VALUE) + "/ab.jar"))); - } -} \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java new file mode 100644 index 0000000..ba3f598 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/parse/repl/load/message/TestPrimaryToReplicaResourceFunction.java @@ -0,0 +1,104 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.parse.repl.load.message; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Function; +import org.apache.hadoop.hive.metastore.api.ResourceType; +import org.apache.hadoop.hive.metastore.api.ResourceUri; +import org.apache.hadoop.hive.ql.exec.ReplCopyTask; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.repl.load.MetaData; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + +import static org.apache.hadoop.hive.ql.parse.repl.load.message.CreateFunctionHandler.PrimaryToReplicaResourceFunction; +import static org.apache.hadoop.hive.ql.parse.repl.load.message.MessageHandler.Context; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.when; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ PrimaryToReplicaResourceFunction.class, FileSystem.class, ReplCopyTask.class, + System.class }) +public class TestPrimaryToReplicaResourceFunction { + + private PrimaryToReplicaResourceFunction function; + @Mock + private HiveConf hiveConf; + @Mock + + private Function functionObj; + @Mock + private FileSystem mockFs; + private static Logger logger = + LoggerFactory.getLogger(TestPrimaryToReplicaResourceFunction.class); + + @Before + public void setup() { + MetaData metadata = new MetaData(null, null, null, null, functionObj); + Context context = + new Context("primaryDb", null, null, null, null, hiveConf, null, null, logger); + when(hiveConf.getVar(HiveConf.ConfVars.REPL_FUNCTIONS_ROOT_DIR)) + .thenReturn("/someBasePath/withADir/"); + function = new PrimaryToReplicaResourceFunction(context, metadata, "replicaDbName"); + } + + @Test + public void createDestinationPath() throws IOException, SemanticException, URISyntaxException { + mockStatic(FileSystem.class); + when(FileSystem.get(any(Configuration.class))).thenReturn(mockFs); + when(mockFs.getScheme()).thenReturn("hdfs"); + when(mockFs.getUri()).thenReturn(new URI("hdfs", "somehost:9000", null, null, null)); + mockStatic(System.class); + when(System.nanoTime()).thenReturn(Long.MAX_VALUE); + when(functionObj.getFunctionName()).thenReturn("someFunctionName"); + mockStatic(ReplCopyTask.class); + Task mock = mock(Task.class); + when(ReplCopyTask.getLoadCopyTask(any(ReplicationSpec.class), any(Path.class), any(Path.class), + any(HiveConf.class))).thenReturn(mock); + + ResourceUri resourceUri = function.destinationResourceUri(new ResourceUri(ResourceType.JAR, + "hdfs://localhost:9000/user/someplace/ab.jar#e094828883")); + + assertThat(resourceUri.getUri(), + is(equalTo( + "hdfs://somehost:9000/someBasePath/withADir/replicadbname/somefunctionname/" + String + .valueOf(Long.MAX_VALUE) + "/ab.jar"))); +
} +} \ No newline at end of file diff --git service/src/test/org/apache/hive/service/cli/CLIServiceRestoreTest.java service/src/test/org/apache/hive/service/cli/CLIServiceRestoreTest.java deleted file mode 100644 index a049440..0000000 --- service/src/test/org/apache/hive/service/cli/CLIServiceRestoreTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hive.service.cli; - - -import org.apache.hadoop.hive.conf.HiveConf; - -import org.junit.Assert; -import org.junit.Test; - -public class CLIServiceRestoreTest { - - CLIService service = getService(); - - @Test - public void testRestore() throws HiveSQLException { - SessionHandle session = service.openSession("foo", "bar", null); - service.stop(); - service = getService(); - try { - service.getSessionManager().getSession(session); - Assert.fail("session already exists before restore"); - } catch (HiveSQLException e) { - Assert.assertTrue(e.getMessage().contains("Invalid SessionHandle")); - } - service.createSessionWithSessionHandle(session, "foo", "bar", null); - Assert.assertNotNull(service.getSessionManager().getSession(session)); - service.stop(); - } - - public CLIService getService() { - HiveConf conf = new HiveConf(); - conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, - "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); - CLIService service = new CLIService(null); - service.init(conf); - service.start(); - return service; - } -} diff --git service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java new file mode 100644 index 0000000..a4f0be0 --- /dev/null +++ service/src/test/org/apache/hive/service/cli/TestCLIServiceRestore.java @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hive.service.cli; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.junit.Assert; +import org.junit.Test; + +public class TestCLIServiceRestore { + + CLIService service = getService(); + + @Test + public void testRestore() throws HiveSQLException { + SessionHandle session = service.openSession("foo", "bar", null); + service.stop(); + service = getService(); + try { + service.getSessionManager().getSession(session); + Assert.fail("session already exists before restore"); + } catch (HiveSQLException e) { + Assert.assertTrue(e.getMessage().contains("Invalid SessionHandle")); + } + service.createSessionWithSessionHandle(session, "foo", "bar", null); + Assert.assertNotNull(service.getSessionManager().getSession(session)); + service.stop(); + } + + public CLIService getService() { + HiveConf conf = new HiveConf(); + conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, + "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); + CLIService service = new CLIService(null); + service.init(conf); + service.start(); + return service; + } +} diff --git storage-api/src/test/org/apache/hadoop/hive/ql/util/JavaDataModelTest.java storage-api/src/test/org/apache/hadoop/hive/ql/util/JavaDataModelTest.java deleted file mode 100644 index 7cd2e12..0000000 --- storage-api/src/test/org/apache/hadoop/hive/ql/util/JavaDataModelTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.apache.hadoop.hive.ql.util; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertSame; - -public final class JavaDataModelTest { - - private static final String DATA_MODEL_PROPERTY = "sun.arch.data.model"; - - private String previousModelSetting; - - @Before - public void setUp() throws Exception { - previousModelSetting = System.getProperty(DATA_MODEL_PROPERTY); - } - - @After - public void tearDown() throws Exception { - if (previousModelSetting != null) { - System.setProperty(DATA_MODEL_PROPERTY, previousModelSetting); - } else { - System.clearProperty(DATA_MODEL_PROPERTY); - } - } - - @Test - public void testGetDoesNotReturnNull() throws Exception { - JavaDataModel model = JavaDataModel.get(); - assertNotNull(model); - } - - @Test - public void testGetModelForSystemWhenSetTo32() throws Exception { - System.setProperty(DATA_MODEL_PROPERTY, "32"); - assertSame(JavaDataModel.JAVA32, JavaDataModel.getModelForSystem()); - } - - @Test - public void testGetModelForSystemWhenSetTo64() throws Exception { - System.setProperty(DATA_MODEL_PROPERTY, "64"); - assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); - } - - @Test - public void testGetModelForSystemWhenSetToUnknown() throws Exception { - System.setProperty(DATA_MODEL_PROPERTY, "unknown"); - assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); - } - - @Test - public void testGetModelForSystemWhenUndefined() throws Exception { - System.clearProperty(DATA_MODEL_PROPERTY); - assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); - } -} \ No newline at end of file diff --git storage-api/src/test/org/apache/hadoop/hive/ql/util/TestJavaDataModel.java storage-api/src/test/org/apache/hadoop/hive/ql/util/TestJavaDataModel.java new file mode 100644 index 0000000..9295b89 --- /dev/null +++ storage-api/src/test/org/apache/hadoop/hive/ql/util/TestJavaDataModel.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hive.ql.util; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; + +public final class TestJavaDataModel { + + private static final String DATA_MODEL_PROPERTY = "sun.arch.data.model"; + + private String previousModelSetting; + + @Before + public void setUp() throws Exception { + previousModelSetting = System.getProperty(DATA_MODEL_PROPERTY); + } + + @After + public void tearDown() throws Exception { + if (previousModelSetting != null) { + System.setProperty(DATA_MODEL_PROPERTY, previousModelSetting); + } else { + System.clearProperty(DATA_MODEL_PROPERTY); + } + } + + @Test + public void testGetDoesNotReturnNull() throws Exception { + JavaDataModel model = JavaDataModel.get(); + assertNotNull(model); + } + + @Test + public void testGetModelForSystemWhenSetTo32() throws Exception { + System.setProperty(DATA_MODEL_PROPERTY, "32"); + assertSame(JavaDataModel.JAVA32, JavaDataModel.getModelForSystem()); + } + + @Test + public void testGetModelForSystemWhenSetTo64() throws Exception { + System.setProperty(DATA_MODEL_PROPERTY, "64"); + assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); + } + + @Test + public void testGetModelForSystemWhenSetToUnknown() throws Exception { + System.setProperty(DATA_MODEL_PROPERTY, "unknown"); + assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); + } + + @Test + public void testGetModelForSystemWhenUndefined() throws Exception { + System.clearProperty(DATA_MODEL_PROPERTY); + assertSame(JavaDataModel.JAVA64, JavaDataModel.getModelForSystem()); + } +} \ No newline at end of file
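
Note: every hunk in this patch follows the same pattern: a FooTest class is deleted and re-added, essentially verbatim, as TestFoo, presumably so the classes match the Test*-prefixed naming pattern used by Hive's test infrastructure; that rationale is an inference from the diff, not something the patch itself states. The TestJavaDataModel suite above also fully pins down the observable contract of JavaDataModel.getModelForSystem(): "32" selects the 32-bit model, while "64", any unrecognized value, or an unset sun.arch.data.model property all yield the 64-bit model. The sketch below is an illustration of an implementation consistent with that contract, written for this note; it is not Hive's actual JavaDataModel source, which carries per-model memory-sizing logic beyond what these tests exercise.

package org.apache.hadoop.hive.ql.util;

// Illustrative sketch only: models the behavior pinned down by TestJavaDataModel,
// not the real Hive class.
public enum JavaDataModel {
  JAVA32,
  JAVA64;

  public static JavaDataModel getModelForSystem() {
    String dataModel = System.getProperty("sun.arch.data.model");
    if ("32".equals(dataModel)) {
      return JAVA32; // only an explicit "32" selects the 32-bit model
    }
    return JAVA64;   // "64", unknown values, and an unset property default to 64-bit
  }

  public static JavaDataModel get() {
    return getModelForSystem(); // never null, matching testGetDoesNotReturnNull
  }
}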