diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 3be5a8dee4..efc5c05e08 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1838,7 +1838,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal
         "filter operators."),

     // Concurrency
-    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", false,
+    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", true,
         "Whether Hive supports concurrency control or not. \n" +
         "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "),
     HIVE_LOCK_MANAGER("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager", ""),
@@ -1887,7 +1887,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal

     // Transactions
     HIVE_TXN_MANAGER("hive.txn.manager",
-        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager",
+        "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager",
         "Set to org.apache.hadoop.hive.ql.lockmgr.DbTxnManager as part of turning on Hive\n" +
         "transactions, which also requires appropriate settings for hive.compactor.initiator.on,\n" +
         "hive.compactor.worker.threads, hive.support.concurrency (true),\n" +
@@ -2971,7 +2971,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal
         + "When it is set to false, only [a-zA-Z_0-9]+ are supported.\n"
         + "The only supported special character right now is '/'. This flag applies only to quoted table names.\n"
         + "The default value is true."),
-    HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", false,
+    HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", true,
         "Whether the eligible tables should be created as ACID insert-only by default. Does \n" +
Does \n" + "not apply to external tables, the ones using storage handlers, etc."), // role names are case-insensitive diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java index e5aa2b8d7a..eebf5b12aa 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java @@ -96,7 +96,6 @@ protected void setUp() throws Exception { hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, TimeUnit.SECONDS); - hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); clientWH = new Warehouse(hcatConf); msc = new HiveMetaStoreClient(hcatConf); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java index 1606982574..0f5b45dcfd 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java @@ -43,7 +43,6 @@ protected void setUp() throws Exception { HiveConf hcatConf = new HiveConf(this.getClass()); hcatConf.set(ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hcatDriver = new Driver(hcatConf); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java index c77bc48e6c..1e9e6f0d60 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java @@ -235,8 +235,6 @@ public LocalMetaServer() { HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java index ff56234cc1..9ffa88bc8b 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java @@ -47,7 +47,7 @@ public static Driver instantiateDriver(MiniCluster cluster) { } hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); LOG.debug("Hive conf : {}", hiveConf.getAllProperties()); Driver driver = new Driver(hiveConf); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java index 
index f78eb15622..f64dffaa0f 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
@@ -79,7 +79,7 @@ protected void setUpHiveConf() {
     hiveConf = new HiveConf(this.getClass());
     hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
     hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
-    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    // hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR);
     hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index ba42ffd627..2b54342162 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -189,7 +189,6 @@ private static void initializeSetup() throws Exception {
         HCatSemanticAnalyzer.class.getName());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");

     System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
     System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
index 6711a3edec..cbc0f3ef1f 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -122,8 +122,6 @@ public static void setup() throws Exception {
         HCatSemanticAnalyzer.class.getName());
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname,
-        "false");
     msc = new HiveMetaStoreClient(hcatConf);
     System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
     System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
index 31857bf643..b2a3448b89 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
@@ -62,7 +62,6 @@ public void Initialize() throws Exception {
     hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
index 4a6c6a3a6f..daa09e66f0 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
@@ -86,7 +86,6 @@ public void setUp() throws Exception {
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index ea9cdda31c..5652929b85 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -120,7 +120,6 @@ public static void setUpBeforeClass() throws Exception {
     HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index 496f3c85da..ca2ec60759 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -163,7 +163,6 @@ public void setup() throws Exception {
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
index 40ea923858..734547337e 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
@@ -107,7 +107,6 @@ public void setUp() throws Exception {
     HiveConf hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
     hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
"nonstrict"); hiveConf diff --git hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java index 9f7c207214..a8b70ecbcc 100644 --- hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java +++ hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java @@ -69,7 +69,6 @@ protected void setUp() throws Exception { hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); hiveConf.set(HCatConstants.HCAT_MSGBUS_TOPIC_PREFIX, "planetlab.hcat"); diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java index 49aad392d8..4dc9f1cfbc 100644 --- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java +++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java @@ -206,7 +206,6 @@ public TestStreaming() throws Exception { conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI); } conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); dbFolder.create(); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 78e767e7fc..19f64c8529 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -128,8 +128,6 @@ public static void startMetaStoreServer() throws Exception { HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java index ad44bc2d62..90607d332f 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java @@ -299,7 +299,7 @@ private void setUpMetastore() throws Exception { //is present only in the ql/test directory hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(), diff --git 
index 120b4af826..d1da0f3f75 100644
--- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
@@ -78,7 +78,6 @@ public void Initialize() throws Exception {
     URI fsuri = getFileSystem().getUri();
     Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
         getTestDir());
-    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
index 308ab0d36a..8f9896c3f9 100644
--- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
+++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
@@ -237,7 +237,6 @@ public static void connectToMetastore() throws Exception {
         DbNotificationListener.class.getName());
     conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, MockMetaStoreEventListener.class.getName());
     conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_DB_LISTENER_TTL, String.valueOf(EVENTS_TTL) + "s");
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
     conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
     conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, DummyRawStoreFailEvent.class.getName());
diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
index d2dbe8f287..9db93944d5 100644
--- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
@@ -74,7 +74,6 @@ public void setup() throws Exception {
    hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
index 0f0ae547f3..eef43744a2 100644
--- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
@@ -55,7 +55,7 @@ public static void beforeTestBase() throws Exception {
     hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         SessionStateUserAuthenticator.class.getName());
     hiveConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    // hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
index 1cd0ee8842..c968d0c825 100644
--- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
@@ -54,7 +54,6 @@ public static void setUpBeforeClass() throws Exception {
     confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName());
     confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
         SemanticAnalysisHook.class.getName());
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "" + Boolean.FALSE);
     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
     miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
index 5e70d68803..ca104d8af9 100644
--- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
@@ -55,7 +55,6 @@ public static void beforeTest() throws Exception {
     // set a small time unit as cookie max age so that the server sends a 401
     hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE, 1,
         TimeUnit.SECONDS);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
     miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
     miniHS2.start(new HashMap());
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
index 3153b9f9c0..cdbf6bd075 100644
--- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
@@ -47,7 +47,6 @@ public static void beforeTest() throws Exception {
     HiveConf hiveConf = new HiveConf();

     SSLTestUtils.setMetastoreSslConf(hiveConf);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index 7f2517b1b8..3fe14c206b 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -146,7 +146,6 @@ public static void setUpBeforeClass() throws Exception {
         PostExecHook.class.getName());
     hiveConf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK,
         SemanticAnalysisHook.class.getName());
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

     hiveServer2 = new HiveServer2();
     hiveServer2.init(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java
index bd8332c49c..e2f63a7483 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java
@@ -63,7 +63,7 @@ protected static void setup() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");

     msc = new HiveMetaStoreClient(hiveConf);

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
index 8366fabf8a..2c01ffae6c 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
@@ -171,7 +171,6 @@ public static void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.setVar(ConfVars.METASTORE_FILTER_HOOK, DummyMetaStoreFilterHookImpl.class.getName());
     UtilsForTest.setNewDerbyDbLocation(hiveConf, TestFilterHooks.class.getSimpleName());
     int port = MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java
index 4b95fe7be6..8765f6d469 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java
@@ -53,7 +53,6 @@ public static void startServices() throws Exception {
     HiveConf hiveConf = new HiveConf();
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 2);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 2);
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

     miniHS2 = new MiniHS2.Builder().withMiniMR().withRemoteMetastore().withConf(hiveConf).build();

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
index 5267dcaa5f..b2a3df1a1f 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
@@ -80,7 +80,6 @@ protected void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
index 7cf351fb35..464a0eff93 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java
@@ -52,7 +52,6 @@ protected void setUp() throws Exception {
     hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));

   }
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
index 3bb340e157..6eaa7e57e3 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
@@ -54,7 +54,6 @@ protected void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
     driver = new Driver(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
index 7219774560..d4c8298d85 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
@@ -103,7 +103,6 @@ protected void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
     driver = new Driver(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
index a9053241cc..bf891b0445 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java
@@ -57,7 +57,6 @@ protected void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
     driver = new Driver(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
index 3d58819b1a..3068186992 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java
@@ -47,7 +47,6 @@ protected void setUp() throws Exception {
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
     driver = new Driver(hiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
index 9d241a89c5..d96480271f 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
@@ -43,7 +43,6 @@ public static void before() throws Exception {
     hiveConf = new HiveConf(TestMetaStoreMetrics.class);
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true);
-    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
index 179eed95d0..b297bbc270 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
@@ -83,7 +83,6 @@ protected void setUp() throws Exception {
     int port = MetaStoreTestUtils.startMetaStoreWithRetry();
     conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
     conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-    conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new URI(tmppath + "/warehouse").getPath());

     // Initialize second mocked filesystem (implement only necessary stuff)
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java
index e78318035a..276f5e36b5 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java
@@ -68,7 +68,6 @@ public static void beforeTest() throws Exception {
     conf = new HiveConf();
     DriverManager.setLoginTimeout(0);

-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setIntVar(HiveConf.ConfVars.METASTORE_LIMIT_PARTITION_REQUEST, PARTITION_REQUEST_LIMIT);
     conf.setBoolVar(HiveConf.ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN, true);
     conf.setBoolVar(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL, true);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index d73cd6426c..febe06868a 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -80,7 +80,6 @@ protected void setUp() {
       }
     }

-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

     // copy the test files into hadoop if required.
     int i = 0;
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
index 2170ca3706..a22beb5342 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
@@ -35,7 +35,6 @@ public void testHookLoading() throws Exception{

     HiveConf conf = new HiveConf(this.getClass());
     conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName());
-    conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     SessionState.start(conf);

     Driver driver = new Driver(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 585411664c..2728eb9a59 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -146,7 +146,6 @@ public static void setUpBeforeClass() throws Exception {
     hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname,
         "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore");
     hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
index cde7a3e33c..a4be42b88f 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
@@ -121,7 +121,7 @@ private void initialize(String cmRoot, String warehouseRoot,
     hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
     System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
index dc3af3c186..e51b55d573 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
@@ -78,7 +78,7 @@ public void setUp() throws Exception {
     clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
     clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);

-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    // clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");

     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
index 6a668aa40c..2639f58703 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
@@ -66,7 +66,6 @@ protected void setUp() throws Exception {
     clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
     clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);

-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
index 57ff8c9ae7..39969f789a 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
@@ -76,7 +76,6 @@ protected void setUp() throws Exception {
     clientHiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
     clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
     clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
index 871cf3db76..41e56689e5 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
@@ -105,7 +105,6 @@ protected void setUp() throws Exception {

     clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
     clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
index 2059370fd4..e7378a2051 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
@@ -63,7 +63,6 @@ public static void setUp() throws Exception {

     clientHiveConf = new HiveConf();
     clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");

     SessionState.start(new CliSessionState(clientHiveConf));
     msc = new HiveMetaStoreClient(clientHiveConf);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 19694b093e..30337510e6 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -97,7 +97,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
     conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
     conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
index 5922a8c603..758df3ab92 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
@@ -119,7 +119,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     UtilsForTest.setNewDerbyDbLocation(conf, TestHiveAuthorizerShowFilters.class.getSimpleName());

     SessionState.start(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java
index 2fb64536a6..3738aae3a7 100644
--- itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java
+++ itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java
@@ -161,7 +161,7 @@ public void before() throws Exception {
     hiveConf = new HiveConf();
     miniHS2 = getNewMiniHS2();
     confOverlay = new HashMap();
-    confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    // confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "binary");
   }

diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java
index 235e6c36ed..a94f883c84 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java
@@ -64,7 +64,7 @@ public static void beforeTest() throws Exception {
     System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());

     conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    // conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     conf.setVar(ConfVars.HIVE_SERVER2_TEZ_DEFAULT_QUEUES, "default");
     conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS);
@@ -169,4 +169,4 @@ WMTrigger wmTriggerFromTrigger(Trigger trigger) {
     result.setActionExpression(trigger.getAction().toString());
     return result;
   }
-}
\ No newline at end of file
+}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java
index dd24f0261f..c97d0df636 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java
@@ -82,12 +82,10 @@ private static HiveConf createHiveConf() {
   public static void beforeTest() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     conf = createHiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     String dataFileDir = conf.get("test.data.files").replace('\\', '/')
         .replace("c:", "");
     dataFilePath = new Path(dataFileDir, "kv1.txt");
     DriverManager.setLoginTimeout(0);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
     Map overlayProps = new HashMap();
     overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java
index 84644d1d89..5518c32ba7 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java
@@ -68,12 +68,10 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti
   public static void beforeTest() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     String dataFileDir = conf.get("test.data.files").replace('\\', '/')
         .replace("c:", "");
     dataFilePath = new Path(dataFileDir, "kv1.txt");
     DriverManager.setLoginTimeout(0);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHS2 = new MiniHS2.Builder().withConf(conf).withMiniMR().withHA().build();
     Map overlayProps = new HashMap();
     overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 70bd29c517..48a518c66d 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -202,7 +202,6 @@ private static void startMiniHS2(HiveConf conf) throws Exception {
   }

   private static void startMiniHS2(HiveConf conf, boolean httpMode) throws Exception {
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false);
     conf.setBoolVar(ConfVars.HIVESTATSCOLAUTOGATHER, false);
     MiniHS2.Builder builder = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
index 71aee8f959..0e7ba4b7bb 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
@@ -116,7 +116,6 @@ public static void beforeTest() throws Exception {
     }

     conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

     conf.addResource(new URL("file://" + new File(confDir).toURI().getPath()
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java
index 2156f4b4de..bbb82c29ed 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java
@@ -91,12 +91,10 @@ private static HiveConf createHiveConf() {
   public static void beforeTest() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     conf = createHiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     String dataFileDir = conf.get("test.data.files").replace('\\', '/')
         .replace("c:", "");
     dataFilePath = new Path(dataFileDir, "kv1.txt");
     DriverManager.setLoginTimeout(0);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
     Map overlayProps = new HashMap();
     overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java
index 84f1168b28..ef578d8844 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java
@@ -59,7 +59,6 @@ public void run(HiveSessionHookContext sessionHookContext)
   public static void beforeTest() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     HiveConf conf = new HiveConf();
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     conf.setVar(ConfVars.HIVE_SERVER2_SESSION_HOOK,
         NoSaslSessionHook.class.getName());
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java
index dc59f4b7fb..842ab8abc3 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java
@@ -56,7 +56,6 @@ public static void beforeTest() throws Exception {
     zkServer = new TestingServer();
     Class.forName(MiniHS2.getJdbcDriverName());
     hiveConf = new HiveConf();
-    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     // Set up zookeeper dynamic service discovery configs
     enableZKServiceDiscoveryConfigs(hiveConf);
     dataFileDir = hiveConf.get("test.data.files").replace('\\', '/').replace("c:", "");
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java
index a983855667..2c58ac51fc 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java
@@ -64,7 +64,6 @@ public static void beforeTest() throws Exception {
     System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());

     conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS);
     conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default");
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
index 285e533fe6..fa3313c409 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java
@@ -49,7 +49,6 @@ public static void beforeTest() throws Exception {
     System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());

     conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS);
     conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default");
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java
index 88a403a0d5..ce6ea2c18c 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java
@@ -66,7 +66,6 @@ public static void afterClass() throws IOException {
   private void initHS2(boolean enableXSRFFilter) throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     HiveConf conf = new HiveConf();
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build();
     dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
     kvDataFilePath = new Path(dataFileDir, "kv1.txt");
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
index 9a44dfab1d..ab4dc70242 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java
@@ -63,7 +63,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     // once SessionState for thread is set, CliDriver picks conf from it
     CliSessionState ss = new CliSessionState(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
index 273ec36d81..fc3e63a40b 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
@@ -78,7 +78,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
index eb29e228e1..622fb23c1a 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java
@@ -64,7 +64,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
index 692bfa0d89..64ecbdc567 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java
@@ -106,7 +106,6 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, TestAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

     miniHS2 = new MiniHS2(conf);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
index 5070c765c9..a2e6c4a8d0 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java
@@ -48,7 +48,6 @@ public void startHS2(HiveConf conf) throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java index 5e653ec75c..b5656a009f 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java @@ -50,7 +50,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java index d176db4279..d3894411bf 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java @@ -61,7 +61,6 @@ public void testRunCluster() throws Exception { break; } HiveConf conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true); for (; idx < confFiles.length; ++idx) { diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java index d8c6beaee4..4218f99fc8 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java @@ -45,7 +45,6 @@ public static void beforeTest() throws Exception { miniHS2 = new MiniHS2(new HiveConf()); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java index c4da73e208..d44e3c68ba 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java @@ -49,7 +49,6 @@ public static void beforeTest() throws Exception { @Before public void setUp() throws Exception { confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL.varname, "3s"); confOverlay.put(ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT.varname, "3s");
miniHS2.start(confOverlay); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java index 7337e9cbee..8157abb70b 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java @@ -75,10 +75,8 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, public static void setup() throws Exception { miniHS2 = new MiniHS2(new HiveConf()); confOverlay = new HashMap(); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, MetricCheckingHook.class.getName()); confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED.varname, "true"); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java index 18892e77d7..8bc72769f8 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java @@ -67,7 +67,6 @@ public void testConfInSession() throws Exception { hiveConf.set(ZK_TIMEOUT_KEY, ZK_TIMEOUT); // check the config used very often! - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(hiveConf); miniHS2.start(new HashMap()); @@ -78,8 +77,6 @@ public void testConfInSession() throws Exception { checkConfVal(DUMMY_CONF_KEY, DUMMY_CONF_KEY + "=" + DUMMY_CONF_VAL, stmt); checkConfVal(ZK_TIMEOUT_KEY, ZK_TIMEOUT_KEY + "=" + ZK_TIMEOUT, stmt); - checkConfVal(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=" + "false", stmt); stmt.close(); hs2Conn.close(); diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java index c58767fc75..a3bcde11ce 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java @@ -71,7 +71,6 @@ public static void startServices() throws Exception { hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Setting hive.server2.enable.doAs to True ensures that HS2 performs the query operation as // the connected user instead of the user running HS2. 
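Review note: every hunk above follows the same pattern — test setups that used to pin hive.support.concurrency to false, either directly on a HiveConf or through the MiniHS2 confOverlay map, simply drop the override and inherit the flipped defaults. A minimal sketch of what those defaults now imply (hypothetical class, not part of the patch):

```java
// Hypothetical sketch: with the new defaults, a bare HiveConf already enables
// concurrency control and the DB-backed transaction manager, which is why the
// explicit test overrides above could be deleted wholesale.
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class NewDefaultsSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    System.out.println(conf.getBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY)); // true
    System.out.println(conf.getVar(ConfVars.HIVE_TXN_MANAGER));
    // expected: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
  }
}
```

Tests that still need the legacy behavior now have to opt out explicitly, the inverse of the old arrangement.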
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java index fd89921d34..5660b9cf11 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java @@ -49,7 +49,6 @@ public static void startServices() throws Exception { hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder() .withMiniMR() diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java index 830ffc2bbd..b51bca8a84 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java @@ -66,7 +66,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.set(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java index 388486d970..f4963452d5 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java @@ -62,7 +62,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.setBoolVar(ConfVars.TEZ_EXEC_SUMMARY, false); miniHS2 = new MiniHS2(hiveConf, MiniClusterType.TEZ); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java index 8febe3e79f..4f685174af 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java @@ -72,7 +72,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java index 1911d2ce17..6b5250a819 100644 --- 
itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java @@ -137,7 +137,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString()); hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, thriftHttpPath); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreGetMetaConf.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreGetMetaConf.java index f71911eb46..00eb683638 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreGetMetaConf.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreGetMetaConf.java @@ -85,7 +85,6 @@ public static void startMetaStoreServer() throws Exception { + msPort); hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 10); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java index 86462ff7b1..3f5a27c863 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStorePartitionSpecs.java @@ -98,8 +98,6 @@ public static void startMetaStoreServer() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); hiveConf.set(HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS.name(), MockPartitionExpressionForMetastore.class.getCanonicalName()); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index a85713b350..449afd2810 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -1274,7 +1274,6 @@ public static boolean isFullAcidTable(CreateTableDesc td) { !AcidUtils.isInsertOnlyTable(td.getTblProps()); } - /** * Sets the acidOperationalProperties in the configuration object argument. 
* @param conf Mutable configuration object diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java index a89e5e0dc0..5ebc480e49 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java @@ -90,7 +90,6 @@ public String redactQuery(String query) { } private static Driver createDriver(HiveConf conf) { - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java index 4d19a715c6..71c2c0d91b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java @@ -49,7 +49,6 @@ @BeforeClass public static void setUpBeforeClass() { conf = new HiveConf(TestQueryHooks.class); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); } diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 976d83d55f..c614a3951e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -136,7 +136,6 @@ public void testCombine() throws Exception { .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK, true); - HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); Driver drv = new Driver(hiveConf); String tblName = "text_symlink_text"; diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 406bdea96a..ff3f7fd8b5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -390,7 +390,6 @@ public void concurrencyFalse() throws Exception { HiveConf badConf = new HiveConf(); badConf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); - badConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); boolean sawException = false; try { TxnManagerFactory.getTxnManagerFactory().getTxnManager(badConf);
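The concurrencyFalse hunk above encodes the invariant that ties the two flipped defaults together: DbTxnManager is only valid when concurrency control is enabled, and TxnManagerFactory rejects a conf that pairs them otherwise. A hedged sketch of that invariant (the thrown type is assumed; only the factory call appears verbatim in the hunk):

```java
// Hedged sketch, not the test itself: pairing DbTxnManager with concurrency
// disabled is an invalid combination that the factory must reject.
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory;

public class TxnFactorySketch {
  public static void main(String[] args) {
    HiveConf badConf = new HiveConf();
    badConf.setVar(ConfVars.HIVE_TXN_MANAGER,
        "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
    // With true now the default, this explicit override is the only way left
    // to construct the invalid combination.
    badConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    try {
      TxnManagerFactory.getTxnManagerFactory().getTxnManager(badConf);
      System.out.println("unexpected: factory accepted the conf");
    } catch (Exception expected) {
      System.out.println("rejected: " + expected.getMessage());
    }
  }
}
```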
diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 913b60c353..57c17addc3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -63,7 +63,6 @@ @Before public void setUp() throws Exception { - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, DummyTxnManager.class.getName()); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java
index a7a76a42cb..822ff85ce3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java @@ -129,7 +129,6 @@ public void testMetrics() throws Exception{ conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "localhost"); conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, String.valueOf(server.getPort())); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); MetricsFactory.init(conf); CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java index 7bda832a3b..078a421979 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java @@ -187,7 +187,6 @@ private static Driver createDriver() { conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS, true); SessionState.start(conf); Driver driver = new Driver(conf); diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java index 79ce2f1769..4aa0dfb944 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java @@ -183,7 +183,6 @@ private static Driver createDriver() { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntityDirect.class.getName()); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java index 234f9796c9..d9bb6f2ffa 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java @@ -62,8 +62,6 @@ public static void onetimeSetup() throws Exception { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntity.class.getName()); - HiveConf - .setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); driver = new Driver(conf); } diff --git ql/src/test/results/clientpositive/acid_table_stats.q.out ql/src/test/results/clientpositive/acid_table_stats.q.out index 4c8297e110..73c4fc942a 100644 --- ql/src/test/results/clientpositive/acid_table_stats.q.out +++ ql/src/test/results/clientpositive/acid_table_stats.q.out @@ -96,7 +96,7 @@ Partition Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -134,9 +134,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid - Statistics: Num rows: 1 Data
size: 3950 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 3950 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -211,7 +211,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -262,7 +262,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -388,7 +388,7 @@ Partition Parameters: numFiles 4 numRows 1000 rawDataSize 208000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -435,7 +435,7 @@ Partition Parameters: numFiles 4 numRows 2000 rawDataSize 416000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -610,6 +610,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -709,21 +711,55 @@ PREHOOK: type: QUERY POSTHOOK: query: explain select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY STAGE DEPENDENCIES: - Stage-0 is a root stage + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 Processor Tree: ListSink PREHOOK: query: select count(*) from acid where ds='2008-04-08' PREHOOK: type: QUERY PREHOOK: Input: default@acid +PREHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### POSTHOOK: query: select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY POSTHOOK: Input: default@acid +POSTHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### 1000 diff --git ql/src/test/results/clientpositive/autoColumnStats_4.q.out ql/src/test/results/clientpositive/autoColumnStats_4.q.out index b3df04fc9a..68d7094f54 100644 --- ql/src/test/results/clientpositive/autoColumnStats_4.q.out +++ ql/src/test/results/clientpositive/autoColumnStats_4.q.out @@ -197,7 +197,7 @@ Table Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 1798 + totalSize 1852 transactional true 
transactional_properties default #### A masked pattern was here #### @@ -241,7 +241,7 @@ Table Parameters: numFiles 4 numRows 0 rawDataSize 0 - totalSize 2909 + totalSize 3033 transactional true transactional_properties default #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/auto_join26.q.out ql/src/test/results/clientpositive/auto_join26.q.out index 9b457415e1..2ff4c1db20 100644 --- ql/src/test/results/clientpositive/auto_join26.q.out +++ ql/src/test/results/clientpositive/auto_join26.q.out @@ -98,16 +98,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 + micromanaged table: true Stage: Stage-3 Stats Work
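Review note: the golden-file churn from here on is mostly one story — newly created managed tables now come out as insert-only transactional ("micromanaged") tables. Plans therefore gain transactional true / transactional_properties insert_only in the table properties, INSERT OVERWRITE becomes a transactional write (Write Type: INSERT, replace: false, micromanaged table: true on the Move Operator), and scratch-dir Move Operator stages collapse into Dependency Collection. A small self-contained sketch of how such a table is recognized (hypothetical helper, mirroring the AcidUtils.isInsertOnlyTable call visible earlier in this patch):

```java
// Hypothetical helper, not the real AcidUtils code: an insert-only table is
// flagged purely through table properties, which is why every affected golden
// file gains exactly these two entries.
import java.util.HashMap;
import java.util.Map;

public class InsertOnlySketch {
  static boolean isInsertOnly(Map<String, String> tblProps) {
    return "true".equalsIgnoreCase(tblProps.get("transactional"))
        && "insert_only".equalsIgnoreCase(tblProps.get("transactional_properties"));
  }

  public static void main(String[] args) {
    Map<String, String> props = new HashMap<>();
    props.put("transactional", "true");
    props.put("transactional_properties", "insert_only");
    System.out.println(isInsertOnly(props)); // true
  }
}
```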
diff --git ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out index 5743944b4c..89686df38f 100644 --- ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out +++ ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out @@ -206,6 +206,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -229,6 +231,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -256,6 +260,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -279,6 +285,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -306,6 +314,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -329,6 +339,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 @@ -562,6 +574,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -585,6 +599,8 @@ STAGE PLANS: serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -612,6 +628,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -635,6 +653,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -662,6 +682,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -685,6 +707,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 diff --git ql/src/test/results/clientpositive/constprog_type.q.out ql/src/test/results/clientpositive/constprog_type.q.out index 27ef1f482b..6dd0d35b86 100644 --- ql/src/test/results/clientpositive/constprog_type.q.out +++ ql/src/test/results/clientpositive/constprog_type.q.out @@ -46,25 +46,24 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + Write Type: INSERT Stage: Stage-7 Conditional Operator Stage: Stage-4 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + micromanaged table: true Stage: Stage-2 Stats Work @@ -95,10 +94,7 @@ STAGE PLANS: name: default.dest1 Stage: Stage-6 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) diff --git ql/src/test/results/clientpositive/create_like_view.q.out ql/src/test/results/clientpositive/create_like_view.q.out index 25b927bbd3..876df97876 100644 --- ql/src/test/results/clientpositive/create_like_view.q.out +++ ql/src/test/results/clientpositive/create_like_view.q.out @@ -52,6 +52,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -114,6 +116,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -296,6 +300,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties
insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/describe_comment_nonascii.q.out ql/src/test/results/clientpositive/describe_comment_nonascii.q.out index a4d512737a..22a34ed641 100644 --- ql/src/test/results/clientpositive/describe_comment_nonascii.q.out +++ ql/src/test/results/clientpositive/describe_comment_nonascii.q.out @@ -54,6 +54,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out index b8113eef10..a2312f4514 100644 --- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out +++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out @@ -174,6 +174,8 @@ STAGE PLANS: serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 7060 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -195,6 +197,8 @@ STAGE PLANS: serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 7060 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none diff --git ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out index 8199235519..ccce4e6c39 100644 --- ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out +++ ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out @@ -54,6 +54,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.non_acid + Write Type: INSERT Stage: Stage-0 Move Operator @@ -67,6 +68,8 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.non_acid + Write Type: INSERT + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/fouter_join_ppr.q.out ql/src/test/results/clientpositive/fouter_join_ppr.q.out index 55d2a4d04b..d8bb3c89cc 100644 --- ql/src/test/results/clientpositive/fouter_join_ppr.q.out +++ ql/src/test/results/clientpositive/fouter_join_ppr.q.out @@ -90,6 +90,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -111,6 +113,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -159,6 +163,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -207,6 +213,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -384,6 +392,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -405,6 +415,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -453,6 +465,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -501,6 +515,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -682,6 +698,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -703,6 +721,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -751,6 +771,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -799,6 +821,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -976,6 +1000,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -997,6 +1023,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + 
transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -1045,6 +1073,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1093,6 +1123,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git ql/src/test/results/clientpositive/groupby2_noskew.q.out ql/src/test/results/clientpositive/groupby2_noskew.q.out index 782a1a2a75..4022d30419 100644 --- ql/src/test/results/clientpositive/groupby2_noskew.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew.q.out @@ -54,16 +54,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out index 9a070aca3f..717a99367b 100644 --- ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out @@ -55,16 +55,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out index 0f77b3d1ca..ee5d4dce4e 100644 --- ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out +++ ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out @@ -92,6 +92,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 + Write Type: INSERT Stage: Stage-0 Move Operator @@ -102,6 +103,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -131,6 +133,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 
+ Write Type: INSERT Stage: Stage-1 Move Operator @@ -141,6 +144,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 + micromanaged table: true Stage: Stage-5 Stats Work diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out index b7e93d9c25..e3daac158b 100644 --- ql/src/test/results/clientpositive/groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/groupby_rollup1.q.out @@ -485,16 +485,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 + micromanaged table: true Stage: Stage-4 Stats Work @@ -554,16 +556,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 + Write Type: INSERT Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 + micromanaged table: true Stage: Stage-7 Stats Work diff --git ql/src/test/results/clientpositive/input15.q.out ql/src/test/results/clientpositive/input15.q.out index 13bdbf21e6..a63b510590 100644 --- ql/src/test/results/clientpositive/input15.q.out +++ ql/src/test/results/clientpositive/input15.q.out @@ -17,6 +17,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.TEST15 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/input30.q.out ql/src/test/results/clientpositive/input30.q.out index 90c9732117..59fd055b76 100644 --- ql/src/test/results/clientpositive/input30.q.out +++ ql/src/test/results/clientpositive/input30.q.out @@ -64,16 +64,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tst_dest30 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tst_dest30 + micromanaged table: true Stage: Stage-2 Stats Work @@ -148,16 +150,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest30 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest30 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/inputddl1.q.out ql/src/test/results/clientpositive/inputddl1.q.out index 12d655dec5..59f2690082 100644 --- ql/src/test/results/clientpositive/inputddl1.q.out +++ ql/src/test/results/clientpositive/inputddl1.q.out @@ -16,6 +16,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.INPUTDDL1 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/inputddl2.q.out ql/src/test/results/clientpositive/inputddl2.q.out index fbb2ecebf0..c142a50986 100644 --- ql/src/test/results/clientpositive/inputddl2.q.out +++ ql/src/test/results/clientpositive/inputddl2.q.out @@ -17,6 +17,9 @@ STAGE PLANS: partition columns: ds string, country string serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.INPUTDDL2 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/rcfile_null_value.q.out ql/src/test/results/clientpositive/rcfile_null_value.q.out index e2a8ae0c4e..3430918d9d 100644 --- ql/src/test/results/clientpositive/rcfile_null_value.q.out +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out @@ -149,16 +149,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.dest1_rc + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.dest1_rc + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/sample8.q.out ql/src/test/results/clientpositive/sample8.q.out index 365b7cfa31..686f3bdd04 100644 --- ql/src/test/results/clientpositive/sample8.q.out +++ ql/src/test/results/clientpositive/sample8.q.out @@ -97,6 +97,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -145,6 +147,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -193,6 +197,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.srcpart @@ -241,6 +247,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git ql/src/test/results/clientpositive/show_create_table_delimited.q.out ql/src/test/results/clientpositive/show_create_table_delimited.q.out index 9fb35c5dda..0552f952d4 100644 --- ql/src/test/results/clientpositive/show_create_table_delimited.q.out +++ ql/src/test/results/clientpositive/show_create_table_delimited.q.out @@ -39,6 +39,8 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'transactional'='true', + 'transactional_properties'='insert_only', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE diff --git ql/src/test/results/clientpositive/stats1.q.out ql/src/test/results/clientpositive/stats1.q.out index 2675ea5b7e..8e7cd7c2f8 100644 --- ql/src/test/results/clientpositive/stats1.q.out +++ ql/src/test/results/clientpositive/stats1.q.out @@ -75,6 +75,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT TableScan alias: s2 Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE @@ -92,16 +93,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work @@ -183,6 +186,8 @@ Table Parameters: numRows 26 rawDataSize 199 totalSize 225 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -224,6 +229,8 @@ Table Parameters: numRows 26 rawDataSize 199 totalSize 1583 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/stats15.q.out ql/src/test/results/clientpositive/stats15.q.out index eb8a995e29..45797bf0b7 100644 --- ql/src/test/results/clientpositive/stats15.q.out +++ ql/src/test/results/clientpositive/stats15.q.out @@ -46,6 +46,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -148,6 +150,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -284,6 +288,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/transform_ppr1.q.out ql/src/test/results/clientpositive/transform_ppr1.q.out index b0c23931b2..ffd050c216 100644 --- ql/src/test/results/clientpositive/transform_ppr1.q.out +++ 
ql/src/test/results/clientpositive/transform_ppr1.q.out
@@ -105,6 +105,8 @@ STAGE PLANS:
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transactional true
+              transactional_properties insert_only
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: default.srcpart
@@ -153,6 +155,8 @@ STAGE PLANS:
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transactional true
+              transactional_properties insert_only
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: default.srcpart
@@ -201,6 +205,8 @@ STAGE PLANS:
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transactional true
+              transactional_properties insert_only
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: default.srcpart
@@ -249,6 +255,8 @@ STAGE PLANS:
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transactional true
+              transactional_properties insert_only
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             name: default.srcpart
diff --git ql/src/test/results/clientpositive/union10.q.out ql/src/test/results/clientpositive/union10.q.out
index f5e2b25801..79d664924d 100644
--- ql/src/test/results/clientpositive/union10.q.out
+++ ql/src/test/results/clientpositive/union10.q.out
@@ -27,7 +27,7 @@ STAGE DEPENDENCIES:
   Stage-2 depends on stages: Stage-1, Stage-9, Stage-10
   Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
   Stage-5
-  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2
   Stage-3 depends on stages: Stage-0
   Stage-4
   Stage-6
@@ -88,6 +88,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.tmptable
+                Write Type: INSERT
           TableScan
             Union
               Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
@@ -103,6 +104,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.tmptable
+                Write Type: INSERT
           TableScan
             Union
               Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE
@@ -118,25 +120,24 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.tmptable
+                Write Type: INSERT

   Stage: Stage-8
     Conditional Operator

   Stage: Stage-5
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.tmptable
+          micromanaged table: true

   Stage: Stage-3
     Stats Work
@@ -167,10 +168,7 @@ STAGE PLANS:
               name: default.tmptable

   Stage: Stage-7
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

   Stage: Stage-9
     Map Reduce
diff --git ql/src/test/results/clientpositive/union31.q.out ql/src/test/results/clientpositive/union31.q.out
index 4edaed63b8..4f503aa064 100644
--- ql/src/test/results/clientpositive/union31.q.out
+++ ql/src/test/results/clientpositive/union31.q.out
@@ -181,16 +181,18 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.t3
+                Write Type: INSERT

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t3
+          micromanaged table: true

   Stage: Stage-3
     Stats Work
@@ -225,16 +227,18 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.t4
+                Write Type: INSERT

   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t4
+          micromanaged table: true

   Stage: Stage-5
     Stats Work
@@ -428,6 +432,7 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.t5
+              Write Type: INSERT
         Group By Operator
           aggregations: sum(VALUE._col0)
           keys: KEY._col0 (type: string)
@@ -446,16 +451,18 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.t6
+              Write Type: INSERT

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t5
+          micromanaged table: true

   Stage: Stage-4
     Stats Work
@@ -464,12 +471,13 @@
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t6
+          micromanaged table: true

   Stage: Stage-5
     Stats Work
@@ -756,6 +764,7 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.t7
+              Write Type: INSERT
         Group By Operator
           aggregations: count(1)
           keys: KEY._col0 (type: string)
@@ -774,16 +783,18 @@ STAGE PLANS:
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.t8
+              Write Type: INSERT

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t7
+          micromanaged table: true

   Stage: Stage-4
     Stats Work
@@ -792,12 +803,13 @@
   Stage: Stage-1
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.t8
+          micromanaged table: true

   Stage: Stage-5
     Stats Work
diff --git ql/src/test/results/clientpositive/union6.q.out ql/src/test/results/clientpositive/union6.q.out
index f7a578c654..771c3527b4 100644
--- ql/src/test/results/clientpositive/union6.q.out
+++ ql/src/test/results/clientpositive/union6.q.out
@@ -23,7 +23,7 @@ STAGE DEPENDENCIES:
   Stage-2 depends on stages: Stage-1
   Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
   Stage-5
-  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2
   Stage-3 depends on stages: Stage-0
   Stage-4
   Stage-6
@@ -78,6 +78,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.tmptable
+                Write Type: INSERT
           TableScan
             alias: s2
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -95,25 +96,24 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.tmptable
+                Write Type: INSERT

   Stage: Stage-8
     Conditional Operator

   Stage: Stage-5
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.tmptable
+          micromanaged table: true

   Stage: Stage-3
     Stats Work
@@ -144,10 +144,7 @@ STAGE PLANS:
               name: default.tmptable

   Stage: Stage-7
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

 PREHOOK: query: insert overwrite table tmptable
 select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1
diff --git ql/src/test/results/clientpositive/union_remove_1.q.out ql/src/test/results/clientpositive/union_remove_1.q.out
index 2e1c7e357c..bbada7b6ff 100644
--- ql/src/test/results/clientpositive/union_remove_1.q.out
+++ ql/src/test/results/clientpositive/union_remove_1.q.out
@@ -83,16 +83,18 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.outputtbl1
+                Write Type: INSERT

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.outputtbl1
+          micromanaged table: true

   Stage: Stage-2
     Map Reduce
@@ -131,6 +133,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.outputtbl1
+                Write Type: INSERT

 PREHOOK: query: insert overwrite table outputTbl1
 SELECT *
@@ -173,6 +176,8 @@ Table Type:	MANAGED_TABLE
 Table Parameters:
 	numFiles	2
 	totalSize	40
+	transactional	true
+	transactional_properties	insert_only
 #### A masked pattern was here ####

 # Storage Information
diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out
index 8896459f73..fbca6d40eb 100644
--- ql/src/test/results/clientpositive/vector_decimal_6.q.out
+++ ql/src/test/results/clientpositive/vector_decimal_6.q.out
@@ -564,12 +564,14 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                     serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
                     name: default.DECIMAL_6_3
+                Write Type: INSERT

   Stage: Stage-0
     Move Operator
       files:
           hdfs directory: true
 #### A masked pattern was here ####
+      Write Type: INSERT

   Stage: Stage-3
     Create Table Operator:
@@ -579,6 +581,9 @@ STAGE PLANS:
           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
           serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           name: default.DECIMAL_6_3
+          table properties:
+            transactional true
+            transactional_properties insert_only

   Stage: Stage-2
     Stats Work
diff --git ql/src/test/results/clientpositive/vector_varchar_4.q.out ql/src/test/results/clientpositive/vector_varchar_4.q.out
index 00a82c3813..957be0bcb5 100644
--- ql/src/test/results/clientpositive/vector_varchar_4.q.out
+++ ql/src/test/results/clientpositive/vector_varchar_4.q.out
@@ -170,6 +170,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
                     serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
                     name: default.varchar_lazy_binary_columnar
+                Write Type: INSERT
       Execution mode: vectorized
       Map Vectorization:
           enabled: true
@@ -185,20 +186,18 @@
     Conditional Operator

   Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

   Stage: Stage-0
     Move Operator
       tables:
-          replace: true
+          replace: false
           table:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
               name: default.varchar_lazy_binary_columnar
+          micromanaged table: true

   Stage: Stage-2
     Stats Work
@@ -219,8 +218,5 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat

   Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+    Dependency Collection

diff --git service/src/test/org/apache/hive/service/cli/CLIServiceTest.java service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
index bc6648e408..410cf3e7dc 100644
--- service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
+++ service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
@@ -156,7 +156,7 @@ public void testExecuteStatement() throws Exception {
     OperationHandle opHandle;

     String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     client.closeOperation(opHandle);

@@ -213,7 +213,7 @@ public void testExecuteStatementAsync() throws Exception {

     // Change lock manager, otherwise unit-test doesn't go through
     queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     opHandle = client.executeStatement(sessionHandle, queryString, confOverlay);
     client.closeOperation(opHandle);

@@ -499,7 +499,7 @@ private SessionHandle openSession(Map<String, String> confOverlay)
     SessionState.get().setIsHiveServerQuery(true); // Pretend we are in HS2.

     String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+        + " = true";
     client.executeStatement(sessionHandle, queryString, confOverlay);
     return sessionHandle;
   }
@@ -587,8 +587,8 @@ public void testConfOverlay() throws Exception {
     String tabName = "TEST_CONF_EXEC";
     String tabNameVar = "tabNameVar";

-    String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
-        + " = false";
+    String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname
+        + " = true";
     OperationHandle opHandle = client.executeStatement(sessionHandle, setLockMgr, null);
     client.closeOperation(opHandle);

diff --git service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
index c4f5451d9d..dd2f491cbf 100644
--- service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
+++ service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java
@@ -47,7 +47,6 @@
   @Test
   public void testQueryInfoInHookContext() throws IllegalAccessException, ClassNotFoundException, InstantiationException, HiveSQLException {
     HiveConf conf = new HiveConf(TestQueryHooks.class);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     conf.set(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS.varname, QueryInfoVerificationHook.class.getName());
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
index 646159f1e4..c60ede7c02 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
@@ -74,7 +74,6 @@ public void setup() throws Exception {
     conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL, "3s");
     conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
     conf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, false);
     MetricsFactory.init(conf);
diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index d18ddc89cb..c6e6deaa2b 100644
--- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -819,7 +819,7 @@ public static ConfVars getMetaConf(String name) {
         "no transactions."),
     // Metastore always support concurrency, but certain ACID tests depend on this being set. We
    // need to do the work to detangle this
-    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", false,
+    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", true,
        "Whether Hive supports concurrency control or not. \n" +
            "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "),