diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index dc31505a44..a5d8946573 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1845,7 +1845,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "filter operators."), // Concurrency - HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", false, + HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", true, "Whether Hive supports concurrency control or not. \n" + "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "), HIVE_LOCK_MANAGER("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager", ""), @@ -1894,7 +1894,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal // Transactions HIVE_TXN_MANAGER("hive.txn.manager", - "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager", + "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager", "Set to org.apache.hadoop.hive.ql.lockmgr.DbTxnManager as part of turning on Hive\n" + "transactions, which also requires appropriate settings for hive.compactor.initiator.on,\n" + "hive.compactor.worker.threads, hive.support.concurrency (true),\n" + @@ -2978,7 +2978,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal + "When it is set to false, only [a-zA-Z_0-9]+ are supported.\n" + "The only supported special character right now is '/'. This flag applies only to quoted table names.\n" + "The default value is true."), - HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", false, + HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", true, "Whether the eligible tables should be created as ACID insert-only by default. 
Does \n" + "not apply to external tables, the ones using storage handlers, etc."), // role names are case-insensitive diff --git data/conf/llap/hive-site.xml data/conf/llap/hive-site.xml index cdda875ddb..74a5030d98 100644 --- data/conf/llap/hive-site.xml +++ data/conf/llap/hive-site.xml @@ -348,4 +348,19 @@ 99 + + hive.support.concurrency + true + + + + hive.txn.manager + org.apache.hadoop.hive.ql.lockmgr.DbTxnManager + + + + hive.create.as.insert.only + true + + diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java index bce246f7fb..c3bafc25ba 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java @@ -95,7 +95,6 @@ protected void setUp() throws Exception { hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, TimeUnit.SECONDS); - hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); clientWH = new Warehouse(hcatConf); msc = new HiveMetaStoreClient(hcatConf); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); @@ -192,7 +191,6 @@ public void testCustomPerms() throws Exception { private void callHCatCli(String[] args) { List argsList = new ArrayList(); - argsList.add("-Dhive.support.concurrency=false"); argsList .add("-Dhive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); argsList.addAll(Arrays.asList(args)); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java index 1606982574..0f5b45dcfd 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java @@ -43,7 +43,6 @@ protected void setUp() throws Exception { HiveConf hcatConf = new HiveConf(this.getClass()); hcatConf.set(ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hcatDriver = new Driver(hcatConf); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java index c77bc48e6c..1e9e6f0d60 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java @@ -235,8 +235,6 @@ public LocalMetaServer() { HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java index ff56234cc1..9ffa88bc8b 100644 --- 
hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java @@ -47,7 +47,6 @@ public static Driver instantiateDriver(MiniCluster cluster) { } hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); LOG.debug("Hive conf : {}", hiveConf.getAllProperties()); Driver driver = new Driver(hiveConf); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java index f78eb15622..f64dffaa0f 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java @@ -79,7 +79,6 @@ protected void setUpHiveConf() { hiveConf = new HiveConf(this.getClass()); hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR); hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index ba42ffd627..2b54342162 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -189,7 +189,6 @@ private static void initializeSetup() throws Exception { HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java index 091efb61d4..499520bfa2 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java @@ -121,8 +121,6 @@ public static void setup() throws Exception { HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); msc = new HiveMetaStoreClient(hcatConf); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java index 31857bf643..b2a3448b89 100644
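Note: the hive.txn.manager description in the first hunk ties the new defaults together — turning on Hive transactions also requires hive.support.concurrency (true) plus compactor settings such as hive.compactor.initiator.on and hive.compactor.worker.threads. A minimal Java sketch of that combination, written against the HiveConf API these tests already use; the compactor calls and their values are illustrative assumptions, not part of this patch:

    // Sketch only: the flipped defaults spelled out explicitly.
    HiveConf conf = new HiveConf();
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
    conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
        "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
    conf.setBoolVar(HiveConf.ConfVars.HIVE_CREATE_TABLES_AS_INSERT_ONLY, true);
    // Compactor side, per the hive.txn.manager description; example values only.
    conf.setBoolean("hive.compactor.initiator.on", true);
    conf.setInt("hive.compactor.worker.threads", 1);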
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java @@ -62,7 +62,6 @@ public void Initialize() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java index 4a6c6a3a6f..daa09e66f0 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java @@ -86,7 +86,6 @@ public void setUp() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java index ea9cdda31c..5652929b85 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java @@ -120,7 +120,6 @@ public static void setUpBeforeClass() throws Exception { HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java index 496f3c85da..ca2ec60759 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java @@ -163,7 +163,6 @@ public void setup() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java 
hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java index 40ea923858..734547337e 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java @@ -107,7 +107,6 @@ public void setUp() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); hiveConf diff --git hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java index 9f7c207214..a8b70ecbcc 100644 --- hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java +++ hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java @@ -69,7 +69,6 @@ protected void setUp() throws Exception { hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); hiveConf.set(HCatConstants.HCAT_MSGBUS_TOPIC_PREFIX, "planetlab.hcat"); diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java index 49aad392d8..4dc9f1cfbc 100644 --- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java +++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java @@ -206,7 +206,6 @@ public TestStreaming() throws Exception { conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI); } conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); dbFolder.create(); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 78e767e7fc..19f64c8529 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -128,8 +128,6 @@ public static void startMetaStoreServer() throws Exception { HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java index 2fe4045835..b3bb5d9f3b 100644 
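Note: with concurrency and the DbTxnManager now on by default, a test that genuinely needs the old non-transactional behavior must opt out explicitly in its own setup rather than rely on the shipped defaults. A minimal sketch of such an opt-out, assuming the legacy DummyTxnManager (the previous hive.txn.manager default shown in the first hunk) is still the appropriate choice for that test:

    // Sketch only: restore the pre-patch behavior for a single test's conf.
    HiveConf conf = new HiveConf();
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
    conf.setBoolVar(HiveConf.ConfVars.HIVE_CREATE_TABLES_AS_INSERT_ONLY, false);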
--- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java @@ -299,7 +299,6 @@ private void setUpMetastore() throws Exception { //is present only in the ql/test directory hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(), diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java index 120b4af826..d1da0f3f75 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java @@ -78,7 +78,6 @@ public void Initialize() throws Exception { URI fsuri = getFileSystem().getUri(); Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(), getTestDir()); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString()); diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 308ab0d36a..8f9896c3f9 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -237,7 +237,6 @@ public static void connectToMetastore() throws Exception { DbNotificationListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, MockMetaStoreEventListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_DB_LISTENER_TTL, String.valueOf(EVENTS_TTL) + "s"); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true); conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, DummyRawStoreFailEvent.class.getName()); diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java index d2dbe8f287..9db93944d5 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java @@ -74,7 +74,6 @@ public void setup() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
warehouseDir); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java index 0f0ae547f3..eef43744a2 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java @@ -55,7 +55,6 @@ public static void beforeTestBase() throws Exception { hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); hiveConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java index 1cd0ee8842..c968d0c825 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java @@ -54,7 +54,6 @@ public static void setUpBeforeClass() throws Exception { confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName()); confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SemanticAnalysisHook.class.getName()); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "" + Boolean.FALSE); HiveConf hiveConf = new HiveConf(); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java index 5e70d68803..ca104d8af9 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java @@ -55,7 +55,6 @@ public static void beforeTest() throws Exception { // set a small time unit as cookie max age so that the server sends a 401 hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE, 1, TimeUnit.SECONDS); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); miniHS2.start(new HashMap()); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java index 3153b9f9c0..cdbf6bd075 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java @@ -47,7 +47,6 @@ public static void beforeTest() throws Exception { HiveConf hiveConf = new HiveConf(); SSLTestUtils.setMetastoreSslConf(hiveConf); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); diff --git itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java index 6cbcf8ca7c..b5df7474e2
100644 --- itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java +++ itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java @@ -85,7 +85,6 @@ public static void setBinaryConfOverlay(Map confOverlay) { public static void setupTestTableWithData(String tableName, Path dataFilePath, Connection hs2Conn) throws Exception { Statement stmt = hs2Conn.createStatement(); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop table if exists " + tableName); stmt.execute("create table " + tableName diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java index 7f2517b1b8..3fe14c206b 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java @@ -146,7 +146,6 @@ public static void setUpBeforeClass() throws Exception { PostExecHook.class.getName()); hiveConf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, SemanticAnalysisHook.class.getName()); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveServer2 = new HiveServer2(); hiveServer2.init(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java index f8ed4196bc..1ea1ee5650 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java @@ -64,7 +64,6 @@ protected static void setup() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); msc = new HiveMetaStoreClient(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java index 91fc706d51..f45115fea6 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java @@ -175,7 +175,6 @@ public static void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.setVar(ConfVars.METASTORE_FILTER_HOOK, DummyMetaStoreFilterHookImpl.class.getName()); UtilsForTest.setNewDerbyDbLocation(hiveConf, TestFilterHooks.class.getSimpleName()); int port = MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java index 4b95fe7be6..8765f6d469 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java +++
itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java @@ -53,7 +53,6 @@ public static void startServices() throws Exception { HiveConf hiveConf = new HiveConf(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 2); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 2); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withMiniMR().withRemoteMetastore().withConf(hiveConf).build(); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java index c29a34dc37..d334b0a97f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java @@ -81,7 +81,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java index 7cf351fb35..464a0eff93 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java @@ -52,7 +52,6 @@ protected void setUp() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); } diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java index e44cfca8ee..116ba3e8bb 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java @@ -55,7 +55,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java index e8031066c2..d0a7dbcc4f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java @@ -104,7 +104,6 @@ 
protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java index 9623fedff9..cba775ee01 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java @@ -58,7 +58,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java index 49823134a7..abf671eaee 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java @@ -48,7 +48,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java index 6f5a963e16..926c0f9040 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java @@ -44,7 +44,6 @@ public static void before() throws Exception { hiveConf = new HiveConf(TestMetaStoreMetrics.class); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java index 0a034d3593..a9e6508775 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java +++ 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java @@ -54,7 +54,6 @@ protected void setUp() throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); System.setProperty(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true"); hiveConf = new HiveConf(this.getClass()); - System.setProperty("hive.support.concurrency", "false"); System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName()); System.setProperty("hive.metastore.pre.event.listeners", diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java index 62d0109b5f..35889bf0ee 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java @@ -119,7 +119,6 @@ public static void setUpBeforeClass() throws Exception { con = getConnection(miniHS2.getBaseJdbcURL() + ";create=true"); try (Statement stmt = con.createStatement()) { Assert.assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.execute("create database " + testDbName); } @@ -131,7 +130,6 @@ public static void tearDownAfterClass() { try { stmt = con.createStatement(); // drop test db and its tables and views - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); FileSystem fs = FileSystem.get(conf); fs.deleteOnExit(ShimLoader.getHadoopShims().getCurrentTrashPath(conf, fs)); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java index 179eed95d0..b297bbc270 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java @@ -83,7 +83,6 @@ protected void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new URI(tmppath + "/warehouse").getPath()); // Initialize second mocked filesystem (implement only necessary stuff) diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java index e78318035a..276f5e36b5 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java @@ -68,7 +68,6 @@ public static void beforeTest() throws Exception { conf = new HiveConf(); DriverManager.setLoginTimeout(0); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setIntVar(HiveConf.ConfVars.METASTORE_LIMIT_PARTITION_REQUEST, PARTITION_REQUEST_LIMIT); conf.setBoolVar(HiveConf.ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN, true); 
conf.setBoolVar(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL, true); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index d73cd6426c..febe06868a 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -80,7 +80,6 @@ protected void setUp() { } } - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // copy the test files into hadoop if required. int i = 0; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java index 2170ca3706..a22beb5342 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java @@ -35,7 +35,6 @@ public void testHookLoading() throws Exception{ HiveConf conf = new HiveConf(this.getClass()); conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName()); - conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); Driver driver = new Driver(conf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 55acd1df36..268967dd80 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -147,7 +147,6 @@ public static void setUpBeforeClass() throws Exception { hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname, "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore"); hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java index cde7a3e33c..a4be42b88f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java @@ -121,7 +121,6 @@ private void initialize(String cmRoot, String warehouseRoot, hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java index dc3af3c186..e51b55d573 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java @@ -78,7 +78,6 @@ public void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java index 6a668aa40c..2639f58703 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java @@ -66,7 +66,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java index 57ff8c9ae7..39969f789a 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java @@ -76,7 +76,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java index edb46fd979..bafdeceab7 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java @@ -109,7 +109,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); -
clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java index 2059370fd4..e7378a2051 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java @@ -63,7 +63,6 @@ public static void setUp() throws Exception { clientHiveConf = new HiveConf(); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(clientHiveConf)); msc = new HiveMetaStoreClient(clientHiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java index 19694b093e..30337510e6 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java @@ -97,7 +97,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName()); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java index 5922a8c603..758df3ab92 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java @@ -119,7 +119,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); UtilsForTest.setNewDerbyDbLocation(conf, TestHiveAuthorizerShowFilters.class.getSimpleName()); SessionState.start(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java index 065f258abc..775a6b3b5d 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java @@ -116,8 +116,6 @@ private static void createTable() 
throws ClassNotFoundException, SQLException { Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); - HiveConf conf = new HiveConf(); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); @@ -461,8 +459,7 @@ public void testBeelineShellCommand() throws Throwable { */ @Test public void testNullDefault() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "select null from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "select null from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "NULL"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), EXPECTED_PATTERN, true); } @@ -473,8 +470,7 @@ public void testNullDefault() throws Throwable { */ @Test public void testNullNonEmpty() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set nullemptystring false\n select null from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "!set nullemptystring false\n select null from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "NULL"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), EXPECTED_PATTERN, true); } @@ -494,8 +490,7 @@ public void testGetVariableValue() throws Throwable { */ @Test public void testNullEmpty() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set nullemptystring true\n select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "!set nullemptystring true\n select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "abc,,def"; List argList = getBaseArgs(miniHS2.getBaseJdbcURL()); @@ -662,13 +657,11 @@ public void testCSVOutput() throws Throwable { } private String getFormatTestQuery() { - return "set hive.support.concurrency = false;\n" + - "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n"; + return "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n"; } private String getFormatTestQueryForEableQuotes() { - return "set hive.support.concurrency = false;\n" + - "select 1, null, 'defg', 'ab\"c', '\"aa\"', 1.0D from " + tableName + " limit 1 ;\n"; + return "select 1, null, 'defg', 'ab\"c', '\"aa\"', 1.0D from " + tableName + " limit 1 ;\n"; } /** @@ -679,8 +672,7 @@ private String getFormatTestQueryForEableQuotes() { */ @Test public void testNullEmptyCmdArg() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "'abc','','def'"; List argList = getBaseArgs(miniHS2.getBaseJdbcURL()); @@ -771,8 +763,7 @@ public void testEmbeddedBeelineConnection() throws Throwable{ @Test public void testQueryProgress() throws Throwable { final String SCRIPT_TEXT = - "set hive.support.concurrency = false;\n" - + "set hive.server2.logging.operation.level=execution;\n" + "set hive.server2.logging.operation.level=execution;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "ELAPSED TIME"; @@ -797,8 +788,7 @@ public void testQueryProgress() throws Throwable { */ @Test public void testQueryProgressParallel() throws Throwable { - final String 
SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "set hive.exec.parallel = true;\n" + + final String SCRIPT_TEXT = "set hive.exec.parallel = true;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "Number of reducers determined to be."; @@ -813,8 +803,7 @@ public void testQueryProgressParallel() throws Throwable { */ @Test public void testQueryProgressHidden() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set silent true\n" + + final String SCRIPT_TEXT = "!set silent true\n" + "select count(*) from " + tableName + ";\n"; final String EXPECTED_PATTERN = "Executing command"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), OutStream.ERR, @@ -825,7 +814,7 @@ public void testQueryProgressHidden() throws Throwable { public void testQueryProgressWithHiveServer2ProgressBarDisabled() throws Throwable { final String SCRIPT_TEXT = - "set hive.support.concurrency = false;\nset hive.server2.in.place.progress=false;\n" + + "set hive.server2.in.place.progress=false;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "(?=Reducer 2\\:).*(?=Map 1\\:)"; diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java index 5dc1465d64..b030b646a9 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java @@ -304,7 +304,6 @@ private static void createTable() throws ClassNotFoundException, SQLException { Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); try { stmt.execute("drop table if exists " + tableName); } catch (Exception ex) { diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java index 2fb64536a6..3738aae3a7 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java @@ -161,7 +161,7 @@ public void before() throws Exception { hiveConf = new HiveConf(); miniHS2 = getNewMiniHS2(); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "binary"); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java index 62ee66f717..039846b7de 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java @@ -66,7 +66,7 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + // 
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_DEFAULT_QUEUES, "default"); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); @@ -196,4 +196,4 @@ WMTrigger wmTriggerFromTrigger(Trigger trigger) { result.setActionExpression(trigger.getAction().toString()); return result; } -} \ No newline at end of file +} diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 7bbafa4e1b..35320b5b13 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -199,7 +199,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio con = getConnection(defaultDbName + ";create=true"); Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.execute("create database " + testDbName); stmt.execute("use " + testDbName); @@ -211,7 +210,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio public static void tearDownAfterClass() throws Exception { Statement stmt = con.createStatement(); // drop test db and its tables and views - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.close(); con.close(); @@ -2645,7 +2643,6 @@ public void testNonAsciiReturnValues() throws Exception { String nonAsciiString = "Garçu Kôkaku kidôtai"; Path nonAsciiFilePath = new Path(dataFileDir, "non_ascii_tbl.txt"); Statement stmt = con.createStatement(); - stmt.execute("set hive.support.concurrency = false"); // Create table stmt.execute("create table " + nonAsciiTableName + " (key int, value string) " diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java index dd24f0261f..c97d0df636 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java @@ -82,12 +82,10 @@ private static HiveConf createHiveConf() { public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = createHiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(conf, MiniClusterType.MR); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java index 84644d1d89..5518c32ba7 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java @@ -68,12 +68,10 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti public static void beforeTest() throws 
Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).withMiniMR().withHA().build(); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index 70bd29c517..12193d041c 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -162,7 +162,6 @@ private static void createTestTables(Connection conn, String dbName) throws SQLE public static void tearDownAfterClass() throws Exception { // drop test db and its tables and views Statement stmt = conDefault.createStatement(); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.close(); if (conTestDb != null) { @@ -202,7 +201,6 @@ private static void startMiniHS2(HiveConf conf) throws Exception { } private static void startMiniHS2(HiveConf conf, boolean httpMode) throws Exception { - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false); conf.setBoolVar(ConfVars.HIVESTATSCOLAUTOGATHER, false); MiniHS2.Builder builder = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false); @@ -431,7 +429,6 @@ public void testURIDatabaseName() throws Exception{ conn = getConnection(jdbcUri+dbName,System.getProperty("user.name"),"bar"); stmt = conn .createStatement(); - stmt.execute("set hive.support.concurrency = false"); res = stmt.executeQuery("show tables"); stmt.execute(" drop table if exists table_in_non_default_schema"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java index 71aee8f959..7eb4fe5b47 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java @@ -116,7 +116,6 @@ public static void beforeTest() throws Exception { } conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.addResource(new URL("file://" + new File(confDir).toURI().getPath() @@ -139,7 +138,6 @@ public void setUp() throws Exception { public static Connection getConnection(String jdbcURL, String user, String pwd) throws SQLException { Connection conn = DriverManager.getConnection(jdbcURL, user, pwd); - conn.createStatement().execute("set hive.support.concurrency = false"); return conn; } @@ -600,4 +598,4 @@ public void run() { private static class ExceptionHolder { Throwable throwable; } -} \ No newline at end of file +} diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java index 2156f4b4de..bbb82c29ed 100644 --- 
itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java @@ -91,12 +91,10 @@ private static HiveConf createHiveConf() { public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = createHiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(conf, MiniClusterType.MR); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java index 84f1168b28..ef578d8844 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java @@ -59,7 +59,6 @@ public void run(HiveSessionHookContext sessionHookContext) public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); HiveConf conf = new HiveConf(); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setVar(ConfVars.HIVE_SERVER2_SESSION_HOOK, NoSaslSessionHook.class.getName()); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java index dc59f4b7fb..842ab8abc3 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java @@ -56,7 +56,6 @@ public static void beforeTest() throws Exception { zkServer = new TestingServer(); Class.forName(MiniHS2.getJdbcDriverName()); hiveConf = new HiveConf(); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Set up zookeeper dynamic service discovery configs enableZKServiceDiscoveryConfigs(hiveConf); dataFileDir = hiveConf.get("test.data.files").replace('\\', '/').replace("c:", ""); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java index 74ca958ea8..e69122e7f1 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java @@ -57,7 +57,6 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java index 285e533fe6..fa3313c409 100644 --- 
itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java @@ -49,7 +49,6 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java index 88a403a0d5..caca08685e 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java @@ -66,7 +66,6 @@ public static void afterClass() throws IOException { private void initHS2(boolean enableXSRFFilter) throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); HiveConf conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build(); dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", ""); kvDataFilePath = new Path(dataFileDir, "kv1.txt"); @@ -78,7 +77,6 @@ private void initHS2(boolean enableXSRFFilter) throws Exception { private Connection getConnection(String jdbcURL, String user, String pwd) throws SQLException { Connection conn = DriverManager.getConnection(jdbcURL, user, pwd); - conn.createStatement().execute("set hive.support.concurrency = false"); return conn; } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java index 9a44dfab1d..ab4dc70242 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java @@ -63,7 +63,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // once SessionState for thread is set, CliDriver picks conf from it CliSessionState ss = new CliSessionState(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java index 273ec36d81..fc3e63a40b 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java @@ -78,7 +78,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); 
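
NOTE: every hunk above makes the same mechanical change: test setup no longer forces
hive.support.concurrency to false, because the HiveConf defaults now enable concurrency
with DbTxnManager as the default transaction manager. A minimal sketch of the before/after
pattern follows; it is illustrative only (not code from this patch), though the ConfVars and
MiniHS2 APIs it uses are the ones visible in the hunks.

    // Old test setup: opt out of concurrency, since the previous default
    // transaction manager (DummyTxnManager) did not provide it.
    HiveConf conf = new HiveConf();
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    MiniHS2 miniHS2 = new MiniHS2(conf);

    // New test setup: rely on the flipped defaults
    // (hive.support.concurrency=true, DbTxnManager).
    MiniHS2 miniHS2Default = new MiniHS2(new HiveConf());

    // A test that still depends on the legacy behavior must now opt out
    // explicitly, pairing both settings so they stay consistent:
    HiveConf legacyConf = new HiveConf();
    legacyConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    legacyConf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
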
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
index eb29e228e1..622fb23c1a 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
@@ -64,7 +64,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
index 692bfa0d89..64ecbdc567 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
@@ -106,7 +106,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, TestAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
index 5070c765c9..a2e6c4a8d0 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
@@ -48,7 +48,6 @@ public void startHS2(HiveConf conf) throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java
index 5e653ec75c..b5656a009f 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java
@@ -50,7 +50,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java
index 68a2c6719b..fdcd0a3180 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java
@@ -116,8 +116,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio
     Statement stmt1 = con1.createStatement();
     assertNotNull("Statement is null", stmt1);
-    stmt1.execute("set hive.support.concurrency = false");
-
     DatabaseMetaData metadata = con1.getMetaData();
 
     // Drop databases created by other test cases
@@ -141,7 +139,6 @@ public void setUp() throws Exception {
     Statement stmt = con.createStatement();
     assertNotNull("Statement is null", stmt);
-    stmt.execute("set hive.support.concurrency = false");
     stmt.execute("set hive.cbo.returnpath.hiveop = true");
 
     // drop table. ignore error.
@@ -2263,7 +2260,6 @@ public void testNonAsciiReturnValues() throws Exception {
     String nonAsciiString = "Garçu Kôkaku kidôtai";
     Path nonAsciiFilePath = new Path(dataFileDir, "non_ascii_tbl.txt");
     Statement stmt = con.createStatement();
-    stmt.execute("set hive.support.concurrency = false");
     // Create table
     stmt.execute("create table " + nonAsciiTableName + " (key int, value string) " +
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
index d176db4279..d3894411bf 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
@@ -61,7 +61,7 @@ public void testRunCluster() throws Exception {
       break;
     }
     HiveConf conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    // conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true);
 
     for (; idx < confFiles.length; ++idx) {
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
index d8c6beaee4..4218f99fc8 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
@@ -45,7 +45,6 @@ public static void beforeTest() throws Exception {
     miniHS2 = new MiniHS2(new HiveConf());
     confOverlay = new HashMap();
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     miniHS2.start(confOverlay);
   }
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java
index c4da73e208..d44e3c68ba 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java
@@ -49,7 +49,6 @@ public static void beforeTest() throws Exception {
   @Before
   public void setUp() throws Exception {
     confOverlay = new HashMap();
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL.varname, "3s");
     confOverlay.put(ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT.varname, "3s");
     miniHS2.start(confOverlay);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
index 7337e9cbee..8157abb70b 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
@@ -75,10 +75,8 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
   public static void setup() throws Exception {
     miniHS2 = new MiniHS2(new HiveConf());
     confOverlay = new HashMap();
-    confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     confOverlay.put(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, MetricCheckingHook.class.getName());
     confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED.varname, "true");
-    confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     miniHS2.start(confOverlay);
   }
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java
index 18892e77d7..8bc72769f8 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java
@@ -67,7 +67,6 @@ public void testConfInSession() throws Exception {
     hiveConf.set(ZK_TIMEOUT_KEY, ZK_TIMEOUT);
     // check the config used very often!
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHS2 = new MiniHS2(hiveConf);
     miniHS2.start(new HashMap());
@@ -78,8 +77,6 @@ public void testConfInSession() throws Exception {
     checkConfVal(DUMMY_CONF_KEY, DUMMY_CONF_KEY + "=" + DUMMY_CONF_VAL, stmt);
     checkConfVal(ZK_TIMEOUT_KEY, ZK_TIMEOUT_KEY + "=" + ZK_TIMEOUT, stmt);
-    checkConfVal(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname,
-        ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=" + "false", stmt);
     stmt.close();
     hs2Conn.close();
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java
index c58767fc75..a3bcde11ce 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java
@@ -71,7 +71,6 @@ public static void startServices() throws Exception {
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1);
     hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     // Setting hive.server2.enable.doAs to True ensures that HS2 performs the query operation as
     // the connected user instead of the user running HS2.
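
NOTE: the same cleanup also covers session-level overrides: the per-connection
"set hive.support.concurrency = false" statements issued through JDBC are removed along
with the conf-object calls. This is more than tidiness; a DbTxnManager configured with
concurrency off is rejected when the transaction manager is created, which is what the
TestDbTxnManager.concurrencyFalse hunk further down exercises. An illustrative sketch of
that invariant (hypothetical check; the real enforcement lives inside TxnManagerFactory):

    HiveConf conf = new HiveConf();
    boolean concurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
    String txnManager = conf.getVar(HiveConf.ConfVars.HIVE_TXN_MANAGER);
    if (txnManager.endsWith("DbTxnManager") && !concurrency) {
      // TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf) fails for
      // this combination, as asserted by TestDbTxnManager#concurrencyFalse.
      throw new IllegalStateException(
          "DbTxnManager requires hive.support.concurrency=true");
    }
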
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
index fd89921d34..5660b9cf11 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
@@ -49,7 +49,6 @@ public static void startServices() throws Exception {
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1);
     hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
 
     miniHS2 = new MiniHS2.Builder()
         .withMiniMR()
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java
index 830ffc2bbd..b51bca8a84 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java
@@ -66,7 +66,6 @@ public static void setUpBeforeClass() throws Exception {
     hiveConf.set(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose");
     miniHS2 = new MiniHS2(hiveConf);
     confOverlay = new HashMap();
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     miniHS2.start(confOverlay);
   }
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java
index 388486d970..f4963452d5 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java
@@ -62,7 +62,6 @@ public static void setUpBeforeClass() throws Exception {
     hiveConf.setBoolVar(ConfVars.TEZ_EXEC_SUMMARY, false);
     miniHS2 = new MiniHS2(hiveConf, MiniClusterType.TEZ);
     confOverlay = new HashMap();
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     miniHS2.start(confOverlay);
   }
 }
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
index 8febe3e79f..4f685174af 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
@@ -72,7 +72,6 @@ public static void setUpBeforeClass() throws Exception {
     hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution");
     miniHS2 = new MiniHS2(hiveConf);
     confOverlay = new HashMap();
-    confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     miniHS2.start(confOverlay);
   }
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
index 32e2fc9489..e02fefc0d0 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
@@ -46,7 +46,6 @@
   @Before
   public void setup() {
     conf = new HiveConf();
-    conf.set("hive.support.concurrency", "false");
     sessionManager = new SessionManager(null);
     sessionManager.init(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java
index 147f53bdf1..f454f8eed6 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java
@@ -125,7 +125,6 @@ public void testMessageSize() throws Exception {
     Connection connection = DriverManager.getConnection(url, "hiveuser", "hive");
     Statement stmt = connection.createStatement();
     assertNotNull("Statement is null", stmt);
-    stmt.execute("set hive.support.concurrency = false");
     connection.close();
 
     stopHiveServer2(hiveServer2);
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
index 1911d2ce17..6b5250a819 100644
--- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
+++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
@@ -137,7 +137,6 @@ public static void setUpBeforeClass() throws Exception {
     hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
     hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, thriftHttpPath);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index f5a5e713bb..b6b73a6c5e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -503,8 +503,11 @@ private DataContainer handleDynParts(Hive db, Table table, LoadTableDesc tbd,
           (tbd.getLbCtx() == null) ? 0 : tbd.getLbCtx().calculateListBucketingLevel(),
           work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID && !tbd.isMmTable(),
-          work.getLoadTableWork().getTxnId(), tbd.getStmtId(), hasFollowingStatsTask(),
-          work.getLoadTableWork().getWriteType());
+          work.getLoadTableWork().getTxnId(),
+          tbd.getStmtId(),
+          hasFollowingStatsTask(),
+          work.getLoadTableWork().getWriteType(),
+          tbd.isInsertOverwrite());
 
       // publish DP columns to its subscribers
       if (dps != null && dps.size() > 0) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index d68d646eb4..1c95843f1f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -4063,7 +4063,7 @@ private static void tryDelete(FileSystem fs, Path path) {
       throws IOException {
     int skipLevels = dpLevels + lbLevels;
     if (filter == null) {
-      filter = new JavaUtils.IdPathFilter(txnId, stmtId, true);
+      filter = new JavaUtils.IdPathFilter(txnId, stmtId, true, false, isBaseDir);
     }
     if (skipLevels == 0) {
       return statusToPath(fs.listStatus(path, filter));
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index a85713b350..449afd2810 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -1274,7 +1274,6 @@ public static boolean isFullAcidTable(CreateTableDesc td) {
         !AcidUtils.isInsertOnlyTable(td.getTblProps());
   }
-
   /**
    * Sets the acidOperationalProperties in the configuration object argument.
    * @param conf Mutable configuration object
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 50bdce89a4..06ddeae52a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2100,7 +2100,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    * @throws HiveException
    */
   private Set getValidPartitionsInPath(
-      int numDP, int numLB, Path loadPath, Long txnId, int stmtId, boolean isMmTable) throws HiveException {
+      int numDP, int numLB, Path loadPath, Long txnId, int stmtId, boolean isMmTable, boolean isInsertOverwrite) throws HiveException {
     Set validPartitions = new HashSet();
     try {
       FileSystem fs = loadPath.getFileSystem(conf);
@@ -2121,7 +2121,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
       // where this is used; we always want to load everything; also the only case where
       // we have multiple statements anyway is union.
       Path[] leafStatus = Utilities.getMmDirectoryCandidates(
-          fs, loadPath, numDP, numLB, null, txnId, -1, conf, false);
+          fs, loadPath, numDP, numLB, null, txnId, -1, conf, isInsertOverwrite);
       for (Path p : leafStatus) {
         Path dpPath = p.getParent(); // Skip the MM directory that we have found.
         for (int i = 0; i < numLB; ++i) {
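
NOTE: from this point the patch leaves test cleanup and touches the write path itself.
MoveTask, Utilities, and Hive.java above (with SemanticAnalyzer, BasicStatsWork, and
LoadTableDesc below) thread a new isInsertOverwrite flag from the query plan down to the
dynamic-partition load of insert-only ("micromanaged", MM) tables. A condensed sketch of
the data flow follows, with parameter lists abbreviated; the identifiers are the ones in
the hunks, but the snippet is not compilable as-is:

    // SemanticAnalyzer.genFileSinkPlan: copy the flag from the file sink
    // descriptor onto the load descriptor.
    if (fileSinkDesc.getInsertOverwrite()) {
      ltd.setInsertOverwrite(true);
    }
    // MoveTask.handleDynParts
    //   -> Hive.loadDynamicPartitions(..., isInsertOverwrite)
    //     -> Hive.getValidPartitionsInPath(..., isInsertOverwrite)
    //       -> Utilities.getMmDirectoryCandidates(..., isBaseDir = isInsertOverwrite)
    // The isBaseDir argument to JavaUtils.IdPathFilter suggests that, for an
    // INSERT OVERWRITE into an MM table, partition enumeration matches base
    // directories rather than only delta directories.
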
for (int i = 0; i < numLB; ++i) { @@ -2169,7 +2169,7 @@ private void constructOneLBLocationMap(FileStatus fSta, public Map, Partition> loadDynamicPartitions(final Path loadPath, final String tableName, final Map partSpec, final LoadFileType loadFileType, final int numDP, final int numLB, final boolean isAcid, final long txnId, final int stmtId, - final boolean hasFollowingStatsTask, final AcidUtils.Operation operation) + final boolean hasFollowingStatsTask, final AcidUtils.Operation operation, boolean isInsertOverwrite) throws HiveException { final Map, Partition> partitionsMap = @@ -2185,7 +2185,7 @@ private void constructOneLBLocationMap(FileStatus fSta, // Get all valid partition paths and existing partitions for them (if any) final Table tbl = getTable(tableName); final Set validPartitions = getValidPartitionsInPath(numDP, numLB, loadPath, txnId, stmtId, - AcidUtils.isInsertOnlyTable(tbl.getParameters())); + AcidUtils.isInsertOnlyTable(tbl.getParameters()), isInsertOverwrite); final int partsToLoad = validPartitions.size(); final AtomicInteger partitionsLoaded = new AtomicInteger(0); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 28e3621d32..6c9a1130b0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -7148,6 +7148,10 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) tableDesc.setWriter(fileSinkDesc); } + if (fileSinkDesc.getInsertOverwrite()) { + ltd.setInsertOverwrite(true); + } + if (SessionState.get().isHiveServerQuery() && null != table_desc && table_desc.getSerdeClassName().equalsIgnoreCase(ThriftJDBCBinarySerDe.class.getName()) && diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java index 831ca9f99a..67ae067580 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java @@ -172,7 +172,9 @@ public boolean isTargetRewritten() { return true; } // INSERT OVERWRITE - if (getLoadTableDesc() != null && getLoadTableDesc().getLoadFileType() == LoadFileType.REPLACE_ALL) { + if (getLoadTableDesc() != null && + (getLoadTableDesc().isInsertOverwrite() || + getLoadTableDesc().getLoadFileType() == LoadFileType.REPLACE_ALL)) { return true; } // CREATE TABLE ... 
AS diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java index 66a4aa11be..bb1b131821 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java @@ -40,6 +40,7 @@ //table specs are to be used private int stmtId; private Long currentTransactionId; + private boolean isInsertOverwrite; // TODO: the below seem like they should just be combined into partitionDesc private org.apache.hadoop.hive.ql.plan.TableDesc table; @@ -209,6 +210,10 @@ public void setInheritTableSpecs(boolean inheritTableSpecs) { this.inheritTableSpecs = inheritTableSpecs; } + public boolean isInsertOverwrite() { return this.isInsertOverwrite; } + + public void setInsertOverwrite(boolean v) { this.isInsertOverwrite = v; } + /** * @return the lbCtx */ diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java index 9be3e14911..f5696a1d76 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java @@ -400,7 +400,6 @@ public void testFetchOperatorContextQuoting() throws Exception { @Test public void testFetchOperatorContext() throws Exception { HiveConf conf = new HiveConf(); - conf.set("hive.support.concurrency", "false"); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java index a89e5e0dc0..5ebc480e49 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java @@ -90,7 +90,6 @@ public String redactQuery(String query) { } private static Driver createDriver(HiveConf conf) { - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java index 4d19a715c6..71c2c0d91b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java @@ -49,7 +49,6 @@ @BeforeClass public static void setUpBeforeClass() { conf = new HiveConf(TestQueryHooks.class); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); } diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 976d83d55f..c614a3951e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -136,7 +136,6 @@ public void testCombine() throws Exception { .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK, true); - HiveConf.setBoolVar(hiveConf, 
HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); Driver drv = new Driver(hiveConf); String tblName = "text_symlink_text"; diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 406bdea96a..ff3f7fd8b5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -390,7 +390,6 @@ public void concurrencyFalse() throws Exception { HiveConf badConf = new HiveConf(); badConf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); - badConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); boolean sawException = false; try { TxnManagerFactory.getTxnManagerFactory().getTxnManager(badConf); diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 913b60c353..57c17addc3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -63,7 +63,6 @@ @Before public void setUp() throws Exception { - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, DummyTxnManager.class.getName()); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java index a7a76a42cb..822ff85ce3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java @@ -129,7 +129,6 @@ public void testMetrics() throws Exception{ conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "localhost"); conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, String.valueOf(server.getPort())); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); MetricsFactory.init(conf); CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java index 7bda832a3b..078a421979 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java @@ -187,7 +187,6 @@ private static Driver createDriver() { conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS, true); SessionState.start(conf); Driver driver = new Driver(conf); diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java index 79ce2f1769..4aa0dfb944 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java @@ -183,7 +183,6 @@ private static Driver 
createDriver() { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntityDirect.class.getName()); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java index 234f9796c9..d9bb6f2ffa 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java @@ -62,8 +62,6 @@ public static void onetimeSetup() throws Exception { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntity.class.getName()); - HiveConf - .setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); driver = new Driver(conf); } diff --git ql/src/test/results/clientpositive/acid_table_stats.q.out ql/src/test/results/clientpositive/acid_table_stats.q.out index 8a25e5aeb7..5ff9453e20 100644 --- ql/src/test/results/clientpositive/acid_table_stats.q.out +++ ql/src/test/results/clientpositive/acid_table_stats.q.out @@ -95,7 +95,7 @@ Partition Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -133,9 +133,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid - Statistics: Num rows: 1 Data size: 3950 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 3950 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -210,7 +210,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -261,7 +261,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -386,7 +386,7 @@ Partition Parameters: numFiles 4 numRows 1000 rawDataSize 208000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -433,7 +433,7 @@ Partition Parameters: numFiles 4 numRows 2000 rawDataSize 416000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -608,6 +608,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -707,21 +709,55 @@ PREHOOK: type: QUERY POSTHOOK: query: explain select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY STAGE DEPENDENCIES: - Stage-0 is a root stage + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic 
stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 Processor Tree: ListSink PREHOOK: query: select count(*) from acid where ds='2008-04-08' PREHOOK: type: QUERY PREHOOK: Input: default@acid +PREHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### POSTHOOK: query: select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY POSTHOOK: Input: default@acid +POSTHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### 1000 diff --git ql/src/test/results/clientpositive/autoColumnStats_4.q.out ql/src/test/results/clientpositive/autoColumnStats_4.q.out index b3df04fc9a..68d7094f54 100644 --- ql/src/test/results/clientpositive/autoColumnStats_4.q.out +++ ql/src/test/results/clientpositive/autoColumnStats_4.q.out @@ -197,7 +197,7 @@ Table Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 1798 + totalSize 1852 transactional true transactional_properties default #### A masked pattern was here #### @@ -241,7 +241,7 @@ Table Parameters: numFiles 4 numRows 0 rawDataSize 0 - totalSize 2909 + totalSize 3033 transactional true transactional_properties default #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/auto_join26.q.out ql/src/test/results/clientpositive/auto_join26.q.out index 91d79857c2..5b2b800fe8 100644 --- ql/src/test/results/clientpositive/auto_join26.q.out +++ ql/src/test/results/clientpositive/auto_join26.q.out @@ -99,31 +99,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 - Select Operator - expressions: _col0 (type: int), _col1 (type: int) - outputColumnNames: key, cnt - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 848 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out index 688fdfa125..3faef6f951 100644 --- 
ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out +++ ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out @@ -206,6 +206,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -229,6 +231,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -256,6 +260,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -279,6 +285,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -306,6 +314,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -329,6 +339,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 @@ -562,6 +574,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -585,6 +599,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -612,6 +628,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -635,6 +653,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -662,6 +682,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -685,6 +707,8 @@ STAGE PLANS: serialization.format 1 serialization.lib 
              org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 80
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.tbl3
diff --git ql/src/test/results/clientpositive/constprog_type.q.out ql/src/test/results/clientpositive/constprog_type.q.out
index 27ef1f482b..6dd0d35b86 100644
--- ql/src/test/results/clientpositive/constprog_type.q.out
+++ ql/src/test/results/clientpositive/constprog_type.q.out
@@ -46,25 +46,24 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.dest1
+          Write Type: INSERT
   Stage: Stage-7
     Conditional Operator
   Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest1
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
@@ -95,10 +94,7 @@ STAGE PLANS:
              name: default.dest1
   Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
 PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
diff --git ql/src/test/results/clientpositive/create_like_view.q.out ql/src/test/results/clientpositive/create_like_view.q.out
index 25b927bbd3..876df97876 100644
--- ql/src/test/results/clientpositive/create_like_view.q.out
+++ ql/src/test/results/clientpositive/create_like_view.q.out
@@ -52,6 +52,8 @@ Table Parameters:
 	numRows	0
 	rawDataSize	0
 	totalSize	0
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
@@ -114,6 +116,8 @@ Table Parameters:
 	numRows	0
 	rawDataSize	0
 	totalSize	0
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
@@ -296,6 +300,8 @@ Table Parameters:
 	numRows	0
 	rawDataSize	0
 	totalSize	0
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
diff --git ql/src/test/results/clientpositive/describe_comment_nonascii.q.out ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
index a4d512737a..22a34ed641 100644
--- ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
+++ ql/src/test/results/clientpositive/describe_comment_nonascii.q.out
@@ -54,6 +54,8 @@ Table Parameters:
 	numRows	0
 	rawDataSize	0
 	totalSize	0
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
index b8113eef10..a2312f4514 100644
--- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
+++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
@@ -174,6 +174,8 @@ STAGE PLANS:
              serialization.format |
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 7060
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -195,6 +197,8 @@ STAGE PLANS:
              serialization.format |
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 7060
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.uservisits_web_text_none
diff --git ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out
index 884e63c44d..ca588138e6 100644
--- ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out
+++ ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out
@@ -54,6 +54,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                name: default.non_acid
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
@@ -67,6 +68,8 @@
                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                name: default.non_acid
+          Write Type: INSERT
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/fouter_join_ppr.q.out ql/src/test/results/clientpositive/fouter_join_ppr.q.out
index 55d2a4d04b..d8bb3c89cc 100644
--- ql/src/test/results/clientpositive/fouter_join_ppr.q.out
+++ ql/src/test/results/clientpositive/fouter_join_ppr.q.out
@@ -90,6 +90,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -111,6 +113,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.src
@@ -159,6 +163,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -207,6 +213,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -384,6 +392,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -405,6 +415,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.src
@@ -453,6 +465,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -501,6 +515,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -682,6 +698,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -703,6 +721,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.src
@@ -751,6 +771,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -799,6 +821,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -976,6 +1000,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -997,6 +1023,8 @@ STAGE PLANS:
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 5812
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.src
@@ -1045,6 +1073,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -1093,6 +1123,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
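[Editor's note] The q.out churn above is mechanical fallout of the default flips: with hive.create.as.insert.only now true, every eligible managed table the test harness creates (src, srcpart, dest1, ...) picks up transactional=true / transactional_properties=insert_only, which then surfaces in every EXPLAIN EXTENDED plan. A minimal sketch of how a test could assert this through the metastore client; the class and method names below are illustrative, not part of the patch.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Table;

    // Sketch: assert a freshly created managed table carries the insert-only
    // ACID properties. verifyInsertOnly is a hypothetical helper name.
    public class VerifyInsertOnlyDefaults {
      static void verifyInsertOnly(HiveConf conf, String db, String tbl) throws Exception {
        HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);
        try {
          Table t = msc.getTable(db, tbl);
          assert "true".equalsIgnoreCase(t.getParameters().get("transactional"));
          assert "insert_only".equals(t.getParameters().get("transactional_properties"));
        } finally {
          msc.close();
        }
      }
    }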
diff --git ql/src/test/results/clientpositive/groupby2_noskew.q.out ql/src/test/results/clientpositive/groupby2_noskew.q.out
index 29a71f1710..716cdc008a 100644
--- ql/src/test/results/clientpositive/groupby2_noskew.q.out
+++ ql/src/test/results/clientpositive/groupby2_noskew.q.out
@@ -55,26 +55,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.dest_g2
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string)
-              outputColumnNames: key, c1, c2
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest_g2
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out
index f1ce8388d7..ef1f0e2ca7 100644
--- ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out
+++ ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out
@@ -56,26 +56,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.dest_g2
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int)
-              outputColumnNames: key, c1, c2, c3, c4
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest_g2
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
index 9a6457cef2..2d76a19f65 100644
--- ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
+++ ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
@@ -94,21 +94,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t1
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
@@ -119,6 +105,7 @@ STAGE PLANS:
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t1
+          micromanaged table: true
   Stage: Stage-3
     Stats Work
@@ -182,21 +169,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t2
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-1
     Move Operator
@@ -207,6 +180,7 @@ STAGE PLANS:
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t2
+          micromanaged table: true
   Stage: Stage-7
     Map Reduce
diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out
index 5ccf8f2285..67dcc0f70b 100644
--- ql/src/test/results/clientpositive/groupby_rollup1.q.out
+++ ql/src/test/results/clientpositive/groupby_rollup1.q.out
@@ -487,31 +487,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t2
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
-              outputColumnNames: key1, key2, val
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(key1, 'hll'), compute_stats(key2, 'hll'), compute_stats(val, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t2
+          micromanaged table: true
   Stage: Stage-4
     Stats Work
@@ -605,31 +592,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t3
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
-              outputColumnNames: key1, key2, val
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(key1, 'hll'), compute_stats(key2, 'hll'), compute_stats(val, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t3
+          micromanaged table: true
   Stage: Stage-9
     Map Reduce
diff --git ql/src/test/results/clientpositive/input15.q.out ql/src/test/results/clientpositive/input15.q.out
index 13bdbf21e6..a63b510590 100644
--- ql/src/test/results/clientpositive/input15.q.out
+++ ql/src/test/results/clientpositive/input15.q.out
@@ -17,6 +17,9 @@ STAGE PLANS:
          output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.TEST15
+          table properties:
+            transactional true
+            transactional_properties insert_only
 PREHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
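[Editor's note] input15 (and the inputddl* files below) show the other half of the change: the Create Table Operator in the plan now lists the default table properties directly. Conceptually the create-table path makes a decision like the following when the flag is on; this is a simplified sketch of the logic described by the hive.create.as.insert.only config text, not the actual analyzer code.

    import java.util.HashMap;
    import java.util.Map;

    // Simplified sketch, not Hive's DDL code: which properties a new managed
    // table receives when hive.create.as.insert.only is true. External tables
    // and storage-handler tables are not eligible, per the HiveConf description.
    class InsertOnlyDefaults {
      static Map<String, String> defaultTableProperties(boolean createAsInsertOnly,
                                                        boolean isExternal,
                                                        boolean usesStorageHandler) {
        Map<String, String> props = new HashMap<>();
        if (createAsInsertOnly && !isExternal && !usesStorageHandler) {
          props.put("transactional", "true");
          props.put("transactional_properties", "insert_only");
        }
        return props;
      }
    }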
diff --git ql/src/test/results/clientpositive/input30.q.out ql/src/test/results/clientpositive/input30.q.out
index 84e4ae4786..fdf64c2d56 100644
--- ql/src/test/results/clientpositive/input30.q.out
+++ ql/src/test/results/clientpositive/input30.q.out
@@ -64,36 +64,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tst_dest30
-            Select Operator
-              expressions: _col0 (type: int)
-              outputColumnNames: a
-              Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(a, 'hll')
-                mode: complete
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: struct)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.tst_dest30
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
@@ -172,36 +154,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.dest30
-            Select Operator
-              expressions: _col0 (type: int)
-              outputColumnNames: a
-              Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(a, 'hll')
-                mode: complete
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: struct)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest30
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/inputddl1.q.out ql/src/test/results/clientpositive/inputddl1.q.out
index 12d655dec5..59f2690082 100644
--- ql/src/test/results/clientpositive/inputddl1.q.out
+++ ql/src/test/results/clientpositive/inputddl1.q.out
@@ -16,6 +16,9 @@ STAGE PLANS:
          output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.INPUTDDL1
+          table properties:
+            transactional true
+            transactional_properties insert_only
 PREHOOK: query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
diff --git ql/src/test/results/clientpositive/inputddl2.q.out ql/src/test/results/clientpositive/inputddl2.q.out
index fbb2ecebf0..c142a50986 100644
--- ql/src/test/results/clientpositive/inputddl2.q.out
+++ ql/src/test/results/clientpositive/inputddl2.q.out
@@ -17,6 +17,9 @@ STAGE PLANS:
          partition columns: ds string, country string
          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.INPUTDDL2
+          table properties:
+            transactional true
+            transactional_properties insert_only
 PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
diff --git ql/src/test/results/clientpositive/rcfile_null_value.q.out ql/src/test/results/clientpositive/rcfile_null_value.q.out
index 2d2bef9be5..2efa6fc54f 100644
--- ql/src/test/results/clientpositive/rcfile_null_value.q.out
+++ ql/src/test/results/clientpositive/rcfile_null_value.q.out
@@ -150,31 +150,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
                name: default.dest1_rc
-            Select Operator
-              expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string)
-              outputColumnNames: c1, c2, c3, c4
-              Statistics: Num rows: 60 Data size: 642 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(c1, 'hll'), compute_stats(c2, 'hll'), compute_stats(c3, 'hll'), compute_stats(c4, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1 Data size: 1728 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
              name: default.dest1_rc
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/sample8.q.out ql/src/test/results/clientpositive/sample8.q.out
index 365b7cfa31..686f3bdd04 100644
--- ql/src/test/results/clientpositive/sample8.q.out
+++ ql/src/test/results/clientpositive/sample8.q.out
@@ -97,6 +97,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -145,6 +147,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -193,6 +197,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -241,6 +247,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
diff --git ql/src/test/results/clientpositive/show_create_table_delimited.q.out ql/src/test/results/clientpositive/show_create_table_delimited.q.out
index 9fb35c5dda..0552f952d4 100644
--- ql/src/test/results/clientpositive/show_create_table_delimited.q.out
+++ ql/src/test/results/clientpositive/show_create_table_delimited.q.out
@@ -39,6 +39,8 @@ OUTPUTFORMAT
 LOCATION
#### A masked pattern was here ####
 TBLPROPERTIES (
+  'transactional'='true',
+  'transactional_properties'='insert_only',
#### A masked pattern was here ####
 PREHOOK: query: DROP TABLE tmp_showcrt1
 PREHOOK: type: DROPTABLE
diff --git ql/src/test/results/clientpositive/stats1.q.out ql/src/test/results/clientpositive/stats1.q.out
index 461d27ee73..83abd99de9 100644
--- ql/src/test/results/clientpositive/stats1.q.out
+++ ql/src/test/results/clientpositive/stats1.q.out
@@ -75,19 +75,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
+          Write Type: INSERT
           TableScan
            alias: s2
            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -105,42 +93,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.tmptable
+          micromanaged table: true
   Stage: Stage-3
     Stats Work
@@ -226,6 +190,8 @@ Table Parameters:
 	numRows	26
 	rawDataSize	199
 	totalSize	225
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
@@ -267,6 +233,8 @@ Table Parameters:
 	numRows	26
 	rawDataSize	199
 	totalSize	1583
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
diff --git ql/src/test/results/clientpositive/stats15.q.out ql/src/test/results/clientpositive/stats15.q.out
index faebe8afd8..96fef6fd10 100644
--- ql/src/test/results/clientpositive/stats15.q.out
+++ ql/src/test/results/clientpositive/stats15.q.out
@@ -46,6 +46,8 @@ Table Parameters:
 	numRows	500
 	rawDataSize	5312
 	totalSize	5812
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
@@ -148,6 +150,8 @@ Table Parameters:
 	numRows	1500
 	rawDataSize	15936
 	totalSize	17436
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
@@ -284,6 +288,8 @@ Table Parameters:
 	numRows	1500
 	rawDataSize	15936
 	totalSize	17436
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
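[Editor's note] Two plan deltas recur throughout these files. First, `replace: true` becomes `replace: false` together with `micromanaged table: true`: for an insert-only table, INSERT OVERWRITE is handled transactionally by writing new files under the table's write ID rather than clobbering the directory, so the final Move no longer replaces files. Second, the trailing Select/Group By `compute_stats` sub-plans disappear because stats collection moves out of the query DAG into the Stats Work stage. An illustration only of the delta-style directory shape this relies on; the real naming and selection logic lives in AcidUtils, and the exact format here is an assumption.

    // Illustration: insert-only ("micromanaged") tables receive new data in
    // per-write-ID delta directories instead of overwriting the table dir.
    // The %07d zero-padded format is assumed for illustration.
    class DeltaNaming {
      static String deltaDir(long writeId) {
        return String.format("delta_%07d_%07d", writeId, writeId);
      }

      public static void main(String[] args) {
        // e.g. .../warehouse/t1/delta_0000005_0000005/000000_0
        System.out.println(deltaDir(5));
      }
    }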
diff --git ql/src/test/results/clientpositive/transform_ppr1.q.out ql/src/test/results/clientpositive/transform_ppr1.q.out
index b0c23931b2..ffd050c216 100644
--- ql/src/test/results/clientpositive/transform_ppr1.q.out
+++ ql/src/test/results/clientpositive/transform_ppr1.q.out
@@ -105,6 +105,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -153,6 +155,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -201,6 +205,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
@@ -249,6 +255,8 @@ STAGE PLANS:
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+             transactional true
+             transactional_properties insert_only
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.srcpart
diff --git ql/src/test/results/clientpositive/union10.q.out ql/src/test/results/clientpositive/union10.q.out
index 0bb9ccd1e5..ac9188b2be 100644
--- ql/src/test/results/clientpositive/union10.q.out
+++ ql/src/test/results/clientpositive/union10.q.out
@@ -27,7 +27,7 @@ STAGE DEPENDENCIES:
   Stage-2 depends on stages: Stage-1, Stage-9, Stage-10
   Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
   Stage-5
-  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2
   Stage-3 depends on stages: Stage-0
   Stage-4
   Stage-6
@@ -88,19 +88,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: key, value
-              Statistics: Num rows: 3 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
+          Write Type: INSERT
           TableScan
            Union
              Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
@@ -116,19 +104,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: key, value
-              Statistics: Num rows: 3 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
+          Write Type: INSERT
           TableScan
            Union
              Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
@@ -144,51 +120,24 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: key, value
-              Statistics: Num rows: 3 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Write Type: INSERT
   Stage: Stage-8
     Conditional Operator
   Stage: Stage-5
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.tmptable
+          micromanaged table: true
   Stage: Stage-3
     Stats Work
@@ -223,10 +172,7 @@ STAGE PLANS:
              name: default.tmptable
   Stage: Stage-7
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
   Stage: Stage-9
     Map Reduce
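[Editor's note] union10 (and union6 below) also show the stage-graph consequence: the intermediate "Move Operator (files)" stages that used to shuffle staging directories become Dependency Collection stages, and Stage-0 gains extra predecessors (`depends on stages: ..., Stage-2`), so the final table-level Move waits for every writer branch before the transactional commit. A toy barrier sketch of that idea; this is an analogy, not Hive's actual Task/DriverContext code.

    import java.util.concurrent.CountDownLatch;

    // Toy model of a "dependency collection" barrier: the final move/commit
    // step runs only after all upstream writer stages signal completion.
    class DependencyCollection {
      private final CountDownLatch writers;

      DependencyCollection(int upstreamStages) {
        this.writers = new CountDownLatch(upstreamStages);
      }

      void stageFinished() { writers.countDown(); }

      void awaitThenCommit(Runnable finalMove) throws InterruptedException {
        writers.await();   // barrier: all writer branches done
        finalMove.run();   // single table-level move/commit
      }
    }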
diff --git ql/src/test/results/clientpositive/union31.q.out ql/src/test/results/clientpositive/union31.q.out
index 6de0b40f29..3618529b11 100644
--- ql/src/test/results/clientpositive/union31.q.out
+++ ql/src/test/results/clientpositive/union31.q.out
@@ -183,31 +183,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t3
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: key, cnt
-              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t3
+          micromanaged table: true
   Stage: Stage-3
     Stats Work
@@ -276,31 +263,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t4
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: value, cnt
-              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(value, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t4
+          micromanaged table: true
   Stage: Stage-7
     Map Reduce
@@ -514,21 +488,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t5
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: c1, cnt
-              Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
         Group By Operator
          aggregations: sum(VALUE._col0)
          keys: KEY._col0 (type: string)
@@ -547,31 +507,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t6
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: c1, cnt
-              Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t5
+          micromanaged table: true
   Stage: Stage-4
     Stats Work
@@ -614,12 +561,13 @@ STAGE PLANS:
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t6
+          micromanaged table: true
   Stage: Stage-7
     Map Reduce
@@ -926,21 +874,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t7
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: c1, cnt
-              Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
         Group By Operator
          aggregations: count(1)
          keys: KEY._col0 (type: string)
@@ -959,31 +893,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.t8
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: int)
-              outputColumnNames: c1, cnt
-              Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t7
+          micromanaged table: true
   Stage: Stage-4
     Stats Work
@@ -1026,12 +947,13 @@ STAGE PLANS:
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t8
+          micromanaged table: true
   Stage: Stage-7
     Map Reduce
diff --git ql/src/test/results/clientpositive/union6.q.out ql/src/test/results/clientpositive/union6.q.out
index ddfd54fd2e..43ea52195b 100644
--- ql/src/test/results/clientpositive/union6.q.out
+++ ql/src/test/results/clientpositive/union6.q.out
@@ -23,7 +23,7 @@ STAGE DEPENDENCIES:
   Stage-2 depends on stages: Stage-1
   Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
   Stage-5
-  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2
   Stage-3 depends on stages: Stage-0
   Stage-4
   Stage-6
@@ -78,19 +78,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
+          Write Type: INSERT
           TableScan
            alias: s2
            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -108,51 +96,24 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.tmptable
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                Reduce Output Operator
-                  sort order:
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-                  value expressions: _col0 (type: struct), _col1 (type: struct)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          Write Type: INSERT
   Stage: Stage-8
     Conditional Operator
   Stage: Stage-5
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.tmptable
+          micromanaged table: true
   Stage: Stage-3
     Stats Work
@@ -187,10 +148,7 @@ STAGE PLANS:
              name: default.tmptable
   Stage: Stage-7
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
 PREHOOK: query: insert overwrite table tmptable
  select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
diff --git ql/src/test/results/clientpositive/union_remove_1.q.out ql/src/test/results/clientpositive/union_remove_1.q.out
index 2e1c7e357c..bbada7b6ff 100644
--- ql/src/test/results/clientpositive/union_remove_1.q.out
+++ ql/src/test/results/clientpositive/union_remove_1.q.out
@@ -83,16 +83,18 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.outputtbl1
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.outputtbl1
+          micromanaged table: true
   Stage: Stage-2
     Map Reduce
@@ -131,6 +133,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.outputtbl1
+          Write Type: INSERT
 PREHOOK: query: insert overwrite table outputTbl1
 SELECT *
@@ -173,6 +176,8 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:
 	numFiles	2
 	totalSize	40
+	transactional	true
+	transactional_properties	insert_only
#### A masked pattern was here ####
 # Storage Information
diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out
index 8896459f73..fbca6d40eb 100644
--- ql/src/test/results/clientpositive/vector_decimal_6.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_6.q.out
@@ -564,12 +564,14 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                name: default.DECIMAL_6_3
+          Write Type: INSERT
   Stage: Stage-0
     Move Operator
       files:
           hdfs directory: true
#### A masked pattern was here ####
+      Write Type: INSERT
   Stage: Stage-3
     Create Table Operator:
@@ -579,6 +581,9 @@ STAGE PLANS:
          output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
          serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde
          name: default.DECIMAL_6_3
+          table properties:
+            transactional true
+            transactional_properties insert_only
   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/vector_varchar_4.q.out ql/src/test/results/clientpositive/vector_varchar_4.q.out
index 00a82c3813..957be0bcb5 100644
--- ql/src/test/results/clientpositive/vector_varchar_4.q.out
+++ ql/src/test/results/clientpositive/vector_varchar_4.q.out
@@ -170,6 +170,7 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
                name: default.varchar_lazy_binary_columnar
+          Write Type: INSERT
       Execution mode: vectorized
       Map Vectorization:
          enabled: true
@@ -185,20 +186,18 @@
     Conditional Operator
   Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
          table:
              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
              serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
              name: default.varchar_lazy_binary_columnar
+          micromanaged table: true
   Stage: Stage-2
     Stats Work
@@ -219,8 +218,5 @@ STAGE PLANS:
       input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
   Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection
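[Editor's note] With concurrency now the default, the service-layer tests below flip their session-level override from false to true; the override itself is an ordinary SET statement routed through the CLI service. For context, a hedged sketch of the same override issued over plain JDBC; the connection URL and credentials are placeholders.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    // Sketch: the session-level override the CLIService tests issue,
    // expressed over JDBC. URL/credentials are placeholders.
    public class SetConcurrencyExample {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default", "", "");
             Statement stmt = conn.createStatement()) {
          stmt.execute("SET hive.support.concurrency = true");
          stmt.execute("SET hive.txn.manager = org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
        }
      }
    }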
diff --git service/src/test/org/apache/hive/service/cli/CLIServiceTest.java service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
index bc6648e408..410cf3e7dc 100644
--- service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
+++ service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
@@ -156,7 +156,7 @@ public void testExecuteStatement() throws Exception {
     OperationHandle opHandle;
     String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     client.closeOperation(opHandle);
@@ -213,7 +213,7 @@ public void testExecuteStatementAsync() throws Exception {
     // Change lock manager, otherwise unit-test doesn't go through
     queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     client.closeOperation(opHandle);
@@ -499,7 +499,7 @@ private SessionHandle openSession(Map confOverlay)
     SessionState.get().setIsHiveServerQuery(true); // Pretend we are in HS2.
     String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     client.executeStatement(sessionHandle, queryString, confOverlay);
     return sessionHandle;
   }
@@ -587,8 +587,8 @@ public void testConfOverlay() throws Exception {
     String tabName = "TEST_CONF_EXEC";
     String tabNameVar = "tabNameVar";
-    String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+    String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
+        + " = true";
     OperationHandle opHandle = client.executeStatement(sessionHandle, setLockMgr, null);
     client.closeOperation(opHandle);
diff --git service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
index c4f5451d9d..dd2f491cbf 100644
--- service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
+++ service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
@@ -47,7 +47,6 @@
   @Test
   public void testQueryInfoInHookContext() throws IllegalAccessException, ClassNotFoundException, InstantiationException, HiveSQLException {
     HiveConf conf = new HiveConf(TestQueryHooks.class);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     conf.set(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS.varname, QueryInfoVerificationHook.class.getName());
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
index 646159f1e4..c60ede7c02 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
@@ -74,7 +74,6 @@ public void setup() throws Exception {
     conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL, "3s");
     conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
     conf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, false);
     MetricsFactory.init(conf);
diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index b46cc38a22..835606f15f 100644
--- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -835,7 +835,7 @@ public static ConfVars getMetaConf(String name) {
         "no transactions."),
     // Metastore always support concurrency, but certain ACID tests depend on this being set. We
     // need to do the work to detangle this
-    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", false,
+    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", true,
         "Whether Hive supports concurrency control or not. \n" +
        "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "),
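[Editor's note] The MetastoreConf change mirrors the HiveConf default so the standalone metastore agrees with the client side. Anything that still needs the old non-ACID behavior now has to opt out explicitly rather than relying on the default; a minimal sketch, assuming the pre-patch DummyTxnManager remains available on the classpath. It uses only the ConfVars names this patch touches.

    import org.apache.hadoop.hive.conf.HiveConf;

    // Minimal sketch of opting back out of the new defaults in a test or
    // client. Assumes the legacy DummyTxnManager is still available.
    public class LegacyNonAcidConf {
      public static HiveConf nonAcidConf() {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
        conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
            "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
        conf.setBoolVar(HiveConf.ConfVars.HIVE_CREATE_TABLES_AS_INSERT_ONLY, false);
        return conf;
      }
    }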