diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index be83489cb3..a131b7afcf 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1845,7 +1845,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
         "filter operators."),
     // Concurrency
-    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", false,
+    HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", true,
         "Whether Hive supports concurrency control or not. \n" +
         "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "),
     HIVE_LOCK_MANAGER("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager", ""),
@@ -1894,7 +1894,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
     // Transactions
     HIVE_TXN_MANAGER("hive.txn.manager",
-        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager",
+        "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager",
         "Set to org.apache.hadoop.hive.ql.lockmgr.DbTxnManager as part of turning on Hive\n" +
         "transactions, which also requires appropriate settings for hive.compactor.initiator.on,\n" +
         "hive.compactor.worker.threads, hive.support.concurrency (true),\n" +
@@ -2982,7 +2982,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
         + "When it is set to false, only [a-zA-Z_0-9]+ are supported.\n"
         + "The only supported special character right now is '/'. This flag applies only to quoted table names.\n"
         + "The default value is true."),
-    HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", false,
+    HIVE_CREATE_TABLES_AS_INSERT_ONLY("hive.create.as.insert.only", true,
         "Whether the eligible tables should be created as ACID insert-only by default. Does \n" +
         "not apply to external tables, the ones using storage handlers, etc."),
     // role names are case-insensitive
diff --git data/conf/llap/hive-site.xml data/conf/llap/hive-site.xml
index cdda875ddb..74a5030d98 100644
--- data/conf/llap/hive-site.xml
+++ data/conf/llap/hive-site.xml
@@ -348,4 +348,19 @@
     99
+  <property>
+    <name>hive.support.concurrency</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.txn.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.DbTxnManager</value>
+  </property>
+
+  <property>
+    <name>hive.create.as.insert.only</name>
+    <value>true</value>
+  </property>
+
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index bce246f7fb..c3bafc25ba 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -95,7 +95,6 @@ protected void setUp() throws Exception {
     hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, TimeUnit.SECONDS);
-    hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     clientWH = new Warehouse(hcatConf);
     msc = new HiveMetaStoreClient(hcatConf);
     System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
@@ -192,7 +191,6 @@ public void testCustomPerms() throws Exception {
   private void callHCatCli(String[] args) {
     List argsList = new ArrayList();
-    argsList.add("-Dhive.support.concurrency=false");
     argsList
         .add("-Dhive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     argsList.addAll(Arrays.asList(args));
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
index 1606982574..0f5b45dcfd 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
@@ -43,7 +43,6 @@ protected void setUp() throws Exception {
     HiveConf hcatConf = new HiveConf(this.getClass());
     hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
     hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
-    hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
     hcatDriver = new Driver(hcatConf);
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
index c77bc48e6c..1e9e6f0d60 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
@@ -235,8 +235,6 @@ public LocalMetaServer() {
           HCatSemanticAnalyzer.class.getName());
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
       hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-      hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname,
-          "false");
       System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
       System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java index ff56234cc1..9ffa88bc8b 100644 ---
hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java @@ -47,7 +47,7 @@ public static Driver instantiateDriver(MiniCluster cluster) { } hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); LOG.debug("Hive conf : {}", hiveConf.getAllProperties()); Driver driver = new Driver(hiveConf); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java index f78eb15622..f64dffaa0f 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java @@ -79,7 +79,7 @@ protected void setUpHiveConf() { hiveConf = new HiveConf(this.getClass()); hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, ""); hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, ""); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + // hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR); hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index ba42ffd627..2b54342162 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -189,7 +189,6 @@ private static void initializeSetup() throws Exception { HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java index 091efb61d4..499520bfa2 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java @@ -121,8 +121,6 @@ public static void setup() throws Exception { HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); msc = new HiveMetaStoreClient(hcatConf); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java index 31857bf643..b2a3448b89 100644 
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java @@ -62,7 +62,6 @@ public void Initialize() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java index 4a6c6a3a6f..daa09e66f0 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java @@ -86,7 +86,6 @@ public void setUp() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java index ea9cdda31c..5652929b85 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java @@ -120,7 +120,6 @@ public static void setUpBeforeClass() throws Exception { HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java index 496f3c85da..ca2ec60759 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java @@ -163,7 +163,6 @@ public void setup() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java 
hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java index 40ea923858..734547337e 100644 --- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java +++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java @@ -107,7 +107,6 @@ public void setUp() throws Exception { HiveConf hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR); hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); hiveConf diff --git hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java index 9f7c207214..a8b70ecbcc 100644 --- hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java +++ hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java @@ -69,7 +69,6 @@ protected void setUp() throws Exception { hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); hiveConf.set(HCatConstants.HCAT_MSGBUS_TOPIC_PREFIX, "planetlab.hcat"); diff --git hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java index dc8eee1aac..2653804eb3 100644 --- hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java +++ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java @@ -206,7 +206,6 @@ public TestStreaming() throws Exception { conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI); } conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); dbFolder.create(); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 78e767e7fc..19f64c8529 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -128,8 +128,6 @@ public static void startMetaStoreServer() throws Exception { HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java index 2fe4045835..b3bb5d9f3b 100644 
--- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java @@ -299,7 +299,7 @@ private void setUpMetastore() throws Exception { //is present only in the ql/test directory hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:" + new File(workDir + "/metastore_db") + ";create=true"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.toString(), diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java index 120b4af826..d1da0f3f75 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java @@ -78,7 +78,6 @@ public void Initialize() throws Exception { URI fsuri = getFileSystem().getUri(); Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(), getTestDir()); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString()); diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 308ab0d36a..8f9896c3f9 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -237,7 +237,6 @@ public static void connectToMetastore() throws Exception { DbNotificationListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, MockMetaStoreEventListener.class.getName()); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_DB_LISTENER_TTL, String.valueOf(EVENTS_TTL) + "s"); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true); conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, DummyRawStoreFailEvent.class.getName()); diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java index d2dbe8f287..9db93944d5 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java @@ -74,7 +74,6 @@ public void setup() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, 
warehouseDir); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java index 0f0ae547f3..eef43744a2 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java @@ -55,7 +55,7 @@ public static void beforeTestBase() throws Exception { hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); hiveConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + // hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java index 1cd0ee8842..c968d0c825 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java @@ -54,7 +54,6 @@ public static void setUpBeforeClass() throws Exception { confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName()); confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SemanticAnalysisHook.class.getName()); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "" + Boolean.FALSE); HiveConf hiveConf = new HiveConf(); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java index 5e70d68803..ca104d8af9 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java @@ -55,7 +55,6 @@ public static void beforeTest() throws Exception { // set a small time unit as cookie max age so that the server sends a 401 hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE, 1, TimeUnit.SECONDS); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); miniHS2.start(new HashMap()); diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java index 3153b9f9c0..cdbf6bd075 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java @@ -47,7 +47,6 @@ public static void beforeTest() throws Exception { HiveConf hiveConf = new HiveConf(); SSLTestUtils.setMetastoreSslConf(hiveConf); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); diff --git itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java index 6cbcf8ca7c..b5df7474e2 
100644 --- itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java +++ itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java @@ -85,7 +85,6 @@ public static void setBinaryConfOverlay(Map confOverlay) { public static void setupTestTableWithData(String tableName, Path dataFilePath, Connection hs2Conn) throws Exception { Statement stmt = hs2Conn.createStatement(); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop table if exists " + tableName); stmt.execute("create table " + tableName diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java index 7f2517b1b8..3fe14c206b 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java @@ -146,7 +146,6 @@ public static void setUpBeforeClass() throws Exception { PostExecHook.class.getName()); hiveConf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, SemanticAnalysisHook.class.getName()); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveServer2 = new HiveServer2(); hiveServer2.init(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java index f8ed4196bc..1ea1ee5650 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/AbstractTestAuthorizationApiAuthorizer.java @@ -64,7 +64,7 @@ protected static void setup() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); msc = new HiveMetaStoreClient(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java index 91fc706d51..f45115fea6 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java @@ -175,7 +175,6 @@ public static void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hiveConf.setVar(ConfVars.METASTORE_FILTER_HOOK, DummyMetaStoreFilterHookImpl.class.getName()); UtilsForTest.setNewDerbyDbLocation(hiveConf, TestFilterHooks.class.getSimpleName()); int port = MetaStoreTestUtils.startMetaStoreWithRetry(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java index 4b95fe7be6..8765f6d469 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java +++ 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreAlterColumnPar.java @@ -53,7 +53,6 @@ public static void startServices() throws Exception { HiveConf hiveConf = new HiveConf(); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 2); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 2); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withMiniMR().withRemoteMetastore().withConf(hiveConf).build(); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java index c29a34dc37..d334b0a97f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java @@ -81,7 +81,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java index 7cf351fb35..464a0eff93 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMarkPartition.java @@ -52,7 +52,6 @@ protected void setUp() throws Exception { hiveConf = new HiveConf(this.getClass()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); } diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java index e44cfca8ee..116ba3e8bb 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java @@ -55,7 +55,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java index e8031066c2..d0a7dbcc4f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java @@ -104,7 +104,6 @@ 
protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java index 9623fedff9..cba775ee01 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListenerOnlyOnCommit.java @@ -58,7 +58,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java index 49823134a7..abf671eaee 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreInitListener.java @@ -48,7 +48,6 @@ protected void setUp() throws Exception { hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(hiveConf)); msc = new HiveMetaStoreClient(hiveConf); driver = new Driver(hiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java index 6f5a963e16..926c0f9040 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java @@ -44,7 +44,6 @@ public static void before() throws Exception { hiveConf = new HiveConf(TestMetaStoreMetrics.class); hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java index 0a034d3593..a9e6508775 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java +++ 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java @@ -54,7 +54,6 @@ protected void setUp() throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false"); System.setProperty(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true"); hiveConf = new HiveConf(this.getClass()); - System.setProperty("hive.support.concurrency", "false"); System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName()); System.setProperty("hive.metastore.pre.event.listeners", diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java index 62d0109b5f..35889bf0ee 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java @@ -119,7 +119,6 @@ public static void setUpBeforeClass() throws Exception { con = getConnection(miniHS2.getBaseJdbcURL() + ";create=true"); try (Statement stmt = con.createStatement()) { Assert.assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.execute("create database " + testDbName); } @@ -131,7 +130,6 @@ public static void tearDownAfterClass() { try { stmt = con.createStatement(); // drop test db and its tables and views - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); FileSystem fs = FileSystem.get(conf); fs.deleteOnExit(ShimLoader.getHadoopShims().getCurrentTrashPath(conf, fs)); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java index 179eed95d0..b297bbc270 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java @@ -83,7 +83,6 @@ protected void setUp() throws Exception { int port = MetaStoreTestUtils.startMetaStoreWithRetry(); conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new URI(tmppath + "/warehouse").getPath()); // Initialize second mocked filesystem (implement only necessary stuff) diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java index e78318035a..276f5e36b5 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMetaStoreLimitPartitionRequest.java @@ -68,7 +68,6 @@ public static void beforeTest() throws Exception { conf = new HiveConf(); DriverManager.setLoginTimeout(0); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setIntVar(HiveConf.ConfVars.METASTORE_LIMIT_PARTITION_REQUEST, PARTITION_REQUEST_LIMIT); conf.setBoolVar(HiveConf.ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN, true); 
conf.setBoolVar(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL, true); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index d73cd6426c..febe06868a 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -80,7 +80,6 @@ protected void setUp() { } } - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // copy the test files into hadoop if required. int i = 0; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java index 2170ca3706..a22beb5342 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java @@ -35,7 +35,6 @@ public void testHookLoading() throws Exception{ HiveConf conf = new HiveConf(this.getClass()); conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName()); - conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(conf); Driver driver = new Driver(conf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 55acd1df36..268967dd80 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -147,7 +147,6 @@ public static void setUpBeforeClass() throws Exception { hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname, "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore"); hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java index cde7a3e33c..a4be42b88f 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java @@ -121,7 +121,7 @@ private void initialize(String cmRoot, String warehouseRoot, hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java index dc3af3c186..e51b55d573 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java @@ -78,7 +78,7 @@ public void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java index 6a668aa40c..2639f58703 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java @@ -66,7 +66,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java index 57ff8c9ae7..39969f789a 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java @@ -76,7 +76,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java index edb46fd979..bafdeceab7 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java @@ -109,7 +109,6 @@ protected void setUp() throws Exception { clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - 
clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java index 2059370fd4..e7378a2051 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java @@ -63,7 +63,6 @@ public static void setUp() throws Exception { clientHiveConf = new HiveConf(); clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); - clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); SessionState.start(new CliSessionState(clientHiveConf)); msc = new HiveMetaStoreClient(clientHiveConf); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java index 19694b093e..30337510e6 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java @@ -97,7 +97,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName()); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java index 5922a8c603..758df3ab92 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java @@ -119,7 +119,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); UtilsForTest.setNewDerbyDbLocation(conf, TestHiveAuthorizerShowFilters.class.getSimpleName()); SessionState.start(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java index 065f258abc..775a6b3b5d 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java @@ -116,8 +116,6 @@ private static void createTable() 
throws ClassNotFoundException, SQLException { Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); - HiveConf conf = new HiveConf(); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); @@ -461,8 +459,7 @@ public void testBeelineShellCommand() throws Throwable { */ @Test public void testNullDefault() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "select null from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "select null from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "NULL"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), EXPECTED_PATTERN, true); } @@ -473,8 +470,7 @@ public void testNullDefault() throws Throwable { */ @Test public void testNullNonEmpty() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set nullemptystring false\n select null from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "!set nullemptystring false\n select null from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "NULL"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), EXPECTED_PATTERN, true); } @@ -494,8 +490,7 @@ public void testGetVariableValue() throws Throwable { */ @Test public void testNullEmpty() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set nullemptystring true\n select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "!set nullemptystring true\n select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "abc,,def"; List argList = getBaseArgs(miniHS2.getBaseJdbcURL()); @@ -662,13 +657,11 @@ public void testCSVOutput() throws Throwable { } private String getFormatTestQuery() { - return "set hive.support.concurrency = false;\n" + - "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n"; + return "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n"; } private String getFormatTestQueryForEableQuotes() { - return "set hive.support.concurrency = false;\n" + - "select 1, null, 'defg', 'ab\"c', '\"aa\"', 1.0D from " + tableName + " limit 1 ;\n"; + return "select 1, null, 'defg', 'ab\"c', '\"aa\"', 1.0D from " + tableName + " limit 1 ;\n"; } /** @@ -679,8 +672,7 @@ private String getFormatTestQueryForEableQuotes() { */ @Test public void testNullEmptyCmdArg() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; + final String SCRIPT_TEXT = "select 'abc',null,'def' from " + tableName + " limit 1 ;\n"; final String EXPECTED_PATTERN = "'abc','','def'"; List argList = getBaseArgs(miniHS2.getBaseJdbcURL()); @@ -771,8 +763,7 @@ public void testEmbeddedBeelineConnection() throws Throwable{ @Test public void testQueryProgress() throws Throwable { final String SCRIPT_TEXT = - "set hive.support.concurrency = false;\n" - + "set hive.server2.logging.operation.level=execution;\n" + "set hive.server2.logging.operation.level=execution;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "ELAPSED TIME"; @@ -797,8 +788,7 @@ public void testQueryProgress() throws Throwable { */ @Test public void testQueryProgressParallel() throws Throwable { - final String 
SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "set hive.exec.parallel = true;\n" + + final String SCRIPT_TEXT = "set hive.exec.parallel = true;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "Number of reducers determined to be."; @@ -813,8 +803,7 @@ public void testQueryProgressParallel() throws Throwable { */ @Test public void testQueryProgressHidden() throws Throwable { - final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + - "!set silent true\n" + + final String SCRIPT_TEXT = "!set silent true\n" + "select count(*) from " + tableName + ";\n"; final String EXPECTED_PATTERN = "Executing command"; testScriptFile(SCRIPT_TEXT, getBaseArgs(miniHS2.getBaseJdbcURL()), OutStream.ERR, @@ -825,7 +814,7 @@ public void testQueryProgressHidden() throws Throwable { public void testQueryProgressWithHiveServer2ProgressBarDisabled() throws Throwable { final String SCRIPT_TEXT = - "set hive.support.concurrency = false;\nset hive.server2.in.place.progress=false;\n" + + "set hive.server2.in.place.progress=false;\n" + "select count(*) from " + tableName + ";\n"; // Check for part of log message as well as part of progress information final String EXPECTED_PATTERN = "(?=Reducer 2\\:).*(?=Map 1\\:)"; diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java index 5dc1465d64..b030b646a9 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java @@ -304,7 +304,6 @@ private static void createTable() throws ClassNotFoundException, SQLException { Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); try { stmt.execute("drop table if exists " + tableName); } catch (Exception ex) { diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java index 2fb64536a6..3738aae3a7 100644 --- itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java @@ -161,7 +161,7 @@ public void before() throws Exception { hiveConf = new HiveConf(); miniHS2 = getNewMiniHS2(); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + // confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "binary"); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java index 62ee66f717..039846b7de 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractJdbcTriggersTest.java @@ -66,7 +66,7 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + // 
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_DEFAULT_QUEUES, "default"); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); @@ -196,4 +196,4 @@ WMTrigger wmTriggerFromTrigger(Trigger trigger) { result.setActionExpression(trigger.getAction().toString()); return result; } -} \ No newline at end of file +} diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 7bbafa4e1b..35320b5b13 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -199,7 +199,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio con = getConnection(defaultDbName + ";create=true"); Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.execute("create database " + testDbName); stmt.execute("use " + testDbName); @@ -211,7 +210,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio public static void tearDownAfterClass() throws Exception { Statement stmt = con.createStatement(); // drop test db and its tables and views - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.close(); con.close(); @@ -2645,7 +2643,6 @@ public void testNonAsciiReturnValues() throws Exception { String nonAsciiString = "Garçu Kôkaku kidôtai"; Path nonAsciiFilePath = new Path(dataFileDir, "non_ascii_tbl.txt"); Statement stmt = con.createStatement(); - stmt.execute("set hive.support.concurrency = false"); // Create table stmt.execute("create table " + nonAsciiTableName + " (key int, value string) " diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java index dd24f0261f..c97d0df636 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithLocalClusterSpark.java @@ -82,12 +82,10 @@ private static HiveConf createHiveConf() { public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = createHiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(conf, MiniClusterType.MR); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java index 84644d1d89..5518c32ba7 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHA.java @@ -68,12 +68,10 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti public static void beforeTest() throws 
Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).withMiniMR().withHA().build(); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index ffeee69f80..b4e7cfbb73 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -168,7 +168,6 @@ private static void createTestTables(Connection conn, String dbName) throws SQLE public static void tearDownAfterClass() throws Exception { // drop test db and its tables and views Statement stmt = conDefault.createStatement(); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("drop database if exists " + testDbName + " cascade"); stmt.close(); if (conTestDb != null) { @@ -208,7 +207,6 @@ private static void startMiniHS2(HiveConf conf) throws Exception { } private static void startMiniHS2(HiveConf conf, boolean httpMode) throws Exception { - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false); conf.setBoolVar(ConfVars.HIVESTATSCOLAUTOGATHER, false); // store post-exec hooks calls so we can look at them later @@ -440,7 +438,6 @@ public void testURIDatabaseName() throws Exception{ conn = getConnection(jdbcUri+dbName,System.getProperty("user.name"),"bar"); stmt = conn .createStatement(); - stmt.execute("set hive.support.concurrency = false"); res = stmt.executeQuery("show tables"); stmt.execute(" drop table if exists table_in_non_default_schema"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java index 71aee8f959..7eb4fe5b47 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java @@ -116,7 +116,6 @@ public static void beforeTest() throws Exception { } conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.addResource(new URL("file://" + new File(confDir).toURI().getPath() @@ -139,7 +138,6 @@ public void setUp() throws Exception { public static Connection getConnection(String jdbcURL, String user, String pwd) throws SQLException { Connection conn = DriverManager.getConnection(jdbcURL, user, pwd); - conn.createStatement().execute("set hive.support.concurrency = false"); return conn; } @@ -600,4 +598,4 @@ public void run() { private static class ExceptionHolder { Throwable throwable; } -} \ No newline at end of file +} diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java index 2156f4b4de..bbb82c29ed 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java 
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMultiSessionsHS2WithLocalClusterSpark.java @@ -91,12 +91,10 @@ private static HiveConf createHiveConf() { public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = createHiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(conf, MiniClusterType.MR); Map overlayProps = new HashMap(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java index 84f1168b28..ef578d8844 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestNoSaslAuth.java @@ -59,7 +59,6 @@ public void run(HiveSessionHookContext sessionHookContext) public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); HiveConf conf = new HiveConf(); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setVar(ConfVars.HIVE_SERVER2_SESSION_HOOK, NoSaslSessionHook.class.getName()); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java index dc59f4b7fb..842ab8abc3 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscoveryWithMiniHS2.java @@ -56,7 +56,6 @@ public static void beforeTest() throws Exception { zkServer = new TestingServer(); Class.forName(MiniHS2.getJdbcDriverName()); hiveConf = new HiveConf(); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Set up zookeeper dynamic service discovery configs enableZKServiceDiscoveryConfigs(hiveConf); dataFileDir = hiveConf.get("test.data.files").replace('\\', '/').replace("c:", ""); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java index 74ca958ea8..e69122e7f1 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java @@ -57,7 +57,6 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java index 285e533fe6..fa3313c409 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java +++ 
itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersWorkloadManager.java @@ -49,7 +49,6 @@ public static void beforeTest() throws Exception { System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); conf.setTimeVar(ConfVars.HIVE_TRIGGER_VALIDATION_INTERVAL_MS, 100, TimeUnit.MILLISECONDS); conf.setVar(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE, "default"); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java index 88a403a0d5..caca08685e 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestXSRFFilter.java @@ -66,7 +66,6 @@ public static void afterClass() throws IOException { private void initHS2(boolean enableXSRFFilter) throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); HiveConf conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build(); dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", ""); kvDataFilePath = new Path(dataFileDir, "kv1.txt"); @@ -78,7 +77,6 @@ private void initHS2(boolean enableXSRFFilter) throws Exception { private Connection getConnection(String jdbcURL, String user, String pwd) throws SQLException { Connection conn = DriverManager.getConnection(jdbcURL, user, pwd); - conn.createStatement().execute("set hive.support.concurrency = false"); return conn; } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java index 9a44dfab1d..ab4dc70242 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestCLIAuthzSessionContext.java @@ -63,7 +63,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // once SessionState for thread is set, CliDriver picks conf from it CliSessionState ss = new CliSessionState(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java index 273ec36d81..fc3e63a40b 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java @@ -78,7 +78,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git 
itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java index eb29e228e1..622fb23c1a 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzSessionContext.java @@ -64,7 +64,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java index 692bfa0d89..64ecbdc567 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcMetadataApiAuth.java @@ -106,7 +106,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, TestAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java index 5070c765c9..a2e6c4a8d0 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthUDFBlacklist.java @@ -48,7 +48,6 @@ public void startHS2(HiveConf conf) throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java index 5e653ec75c..b5656a009f 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java @@ -50,7 +50,6 @@ public static void beforeTest() throws Exception { conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); - 
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); miniHS2 = new MiniHS2(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java index 68a2c6719b..fdcd0a3180 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java @@ -116,8 +116,6 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundExceptio Statement stmt1 = con1.createStatement(); assertNotNull("Statement is null", stmt1); - stmt1.execute("set hive.support.concurrency = false"); - DatabaseMetaData metadata = con1.getMetaData(); // Drop databases created by other test cases @@ -141,7 +139,6 @@ public void setUp() throws Exception { Statement stmt = con.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); stmt.execute("set hive.cbo.returnpath.hiveop = true"); // drop table. ignore error. @@ -2263,7 +2260,6 @@ public void testNonAsciiReturnValues() throws Exception { String nonAsciiString = "Garçu Kôkaku kidôtai"; Path nonAsciiFilePath = new Path(dataFileDir, "non_ascii_tbl.txt"); Statement stmt = con.createStatement(); - stmt.execute("set hive.support.concurrency = false"); // Create table stmt.execute("create table " + nonAsciiTableName + " (key int, value string) " + diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java index d176db4279..d3894411bf 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java @@ -61,7 +61,7 @@ public void testRunCluster() throws Exception { break; } HiveConf conf = new HiveConf(); - conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + // conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_RPC_QUERY_PLAN, true); for (; idx < confFiles.length; ++idx) { diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java index d8c6beaee4..4218f99fc8 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java @@ -45,7 +45,6 @@ public static void beforeTest() throws Exception { miniHS2 = new MiniHS2(new HiveConf()); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java index c4da73e208..d44e3c68ba 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2SessionTimeout.java @@ -49,7 +49,6 @@ public static void beforeTest() throws Exception { @Before public void setUp() throws Exception { confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); 
confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL.varname, "3s"); confOverlay.put(ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT.varname, "3s"); miniHS2.start(confOverlay); diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java index 7337e9cbee..8157abb70b 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java @@ -75,10 +75,8 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, public static void setup() throws Exception { miniHS2 = new MiniHS2(new HiveConf()); confOverlay = new HashMap(); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); confOverlay.put(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, MetricCheckingHook.class.getName()); confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED.varname, "true"); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java index 18892e77d7..8bc72769f8 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestMiniHS2.java @@ -67,7 +67,6 @@ public void testConfInSession() throws Exception { hiveConf.set(ZK_TIMEOUT_KEY, ZK_TIMEOUT); // check the config used very often! - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2(hiveConf); miniHS2.start(new HashMap()); @@ -78,8 +77,6 @@ public void testConfInSession() throws Exception { checkConfVal(DUMMY_CONF_KEY, DUMMY_CONF_KEY + "=" + DUMMY_CONF_VAL, stmt); checkConfVal(ZK_TIMEOUT_KEY, ZK_TIMEOUT_KEY + "=" + ZK_TIMEOUT, stmt); - checkConfVal(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=" + "false", stmt); stmt.close(); hs2Conn.close(); diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java index c58767fc75..a3bcde11ce 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/TestDFSErrorHandling.java @@ -71,7 +71,6 @@ public static void startServices() throws Exception { hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); // Setting hive.server2.enable.doAs to True ensures that HS2 performs the query operation as // the connected user instead of the user running HS2. 
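With the per-test hive.support.concurrency overrides removed above, these suites simply inherit the new transactional defaults from HiveConf. As a minimal sketch (not part of this patch, assuming JUnit 4), a test that still depends on the legacy non-transactional behaviour would now have to opt out explicitly; the class name LegacyLockingTest is hypothetical, and only HiveConf setters already used elsewhere in this diff appear:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.junit.Before;

    public class LegacyLockingTest {
      private HiveConf conf;

      @Before
      public void setUp() {
        conf = new HiveConf();
        // Revert to the pre-patch defaults for this test only.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
        conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
            "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
      }
    }
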
diff --git itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java index fd89921d34..5660b9cf11 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java @@ -49,7 +49,6 @@ public static void startServices() throws Exception { hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS, 1); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS, 1); hiveConf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder() .withMiniMR() diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java index 830ffc2bbd..b51bca8a84 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithMr.java @@ -66,7 +66,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.set(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java index 388486d970..f4963452d5 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingAPIWithTez.java @@ -62,7 +62,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.setBoolVar(ConfVars.TEZ_EXEC_SUMMARY, false); miniHS2 = new MiniHS2(hiveConf, MiniClusterType.TEZ); confOverlay = new HashMap(); - confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java index 8febe3e79f..4f685174af 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java @@ -72,7 +72,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution"); miniHS2 = new MiniHS2(hiveConf); confOverlay = new HashMap(); - confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); miniHS2.start(confOverlay); } diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java index 32e2fc9489..e02fefc0d0 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java +++ 
itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java @@ -46,7 +46,6 @@ @Before public void setup() { conf = new HiveConf(); - conf.set("hive.support.concurrency", "false"); sessionManager = new SessionManager(null); sessionManager.init(conf); diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java index 147f53bdf1..f454f8eed6 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceMessageSize.java @@ -125,7 +125,6 @@ public void testMessageSize() throws Exception { Connection connection = DriverManager.getConnection(url, "hiveuser", "hive"); Statement stmt = connection.createStatement(); assertNotNull("Statement is null", stmt); - stmt.execute("set hive.support.concurrency = false"); connection.close(); stopHiveServer2(hiveServer2); diff --git itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java index 1911d2ce17..6b5250a819 100644 --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java @@ -137,7 +137,6 @@ public static void setUpBeforeClass() throws Exception { hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString()); hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode); hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, thriftHttpPath); - hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java index 8387208dd4..ec34b1f11e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java @@ -503,8 +503,11 @@ private DataContainer handleDynParts(Hive db, Table table, LoadTableDesc tbd, (tbd.getLbCtx() == null) ? 
0 : tbd.getLbCtx().calculateListBucketingLevel(), work.getLoadTableWork().getWriteType() != AcidUtils.Operation.NOT_ACID && !tbd.isMmTable(), - work.getLoadTableWork().getTxnId(), tbd.getStmtId(), hasFollowingStatsTask(), - work.getLoadTableWork().getWriteType()); + work.getLoadTableWork().getTxnId(), + tbd.getStmtId(), + hasFollowingStatsTask(), + work.getLoadTableWork().getWriteType(), + tbd.isInsertOverwrite()); // publish DP columns to its subscribers if (dps != null && dps.size() > 0) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index d68d646eb4..1c95843f1f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -4063,7 +4063,7 @@ private static void tryDelete(FileSystem fs, Path path) { throws IOException { int skipLevels = dpLevels + lbLevels; if (filter == null) { - filter = new JavaUtils.IdPathFilter(txnId, stmtId, true); + filter = new JavaUtils.IdPathFilter(txnId, stmtId, true, false, isBaseDir); } if (skipLevels == 0) { return statusToPath(fs.listStatus(path, filter)); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index bb105fe6ce..e402670632 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -1295,7 +1295,6 @@ public static boolean isAcidTable(CreateTableDesc td) { !AcidUtils.isInsertOnlyTable(td.getTblProps()); } - /** * Sets the acidOperationalProperties in the configuration object argument. * @param conf Mutable configuration object diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 022ba04fbe..7f55fa786f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2100,7 +2100,7 @@ private void constructOneLBLocationMap(FileStatus fSta, * @throws HiveException */ private Set getValidPartitionsInPath( - int numDP, int numLB, Path loadPath, Long txnId, int stmtId, boolean isMmTable) throws HiveException { + int numDP, int numLB, Path loadPath, Long txnId, int stmtId, boolean isMmTable, boolean isInsertOverwrite) throws HiveException { Set validPartitions = new HashSet(); try { FileSystem fs = loadPath.getFileSystem(conf); @@ -2121,7 +2121,7 @@ private void constructOneLBLocationMap(FileStatus fSta, // where this is used; we always want to load everything; also the only case where // we have multiple statements anyway is union. Path[] leafStatus = Utilities.getMmDirectoryCandidates( - fs, loadPath, numDP, numLB, null, txnId, -1, conf, false); + fs, loadPath, numDP, numLB, null, txnId, -1, conf, isInsertOverwrite); for (Path p : leafStatus) { Path dpPath = p.getParent(); // Skip the MM directory that we have found. 
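Across this hunk and the plan-side changes that follow, the patch threads a new insert-overwrite flag from the file sink down to the directory scan. Below is a condensed sketch of that flow, not the actual patch code: the class and method names are the ones touched in this diff, bodies and argument lists are abbreviated, and the remark about base versus delta directories is an assumption about micromanaged-table layout rather than something stated in the patch.

    // Sketch only: how LoadTableDesc.isInsertOverwrite propagates (bodies elided).
    void sketchInsertOverwriteFlow(FileSinkDesc fileSinkDesc, LoadTableDesc ltd) {
      // SemanticAnalyzer#genFileSinkPlan copies the flag from the file sink.
      if (fileSinkDesc.getInsertOverwrite()) {
        ltd.setInsertOverwrite(true);
      }
      // MoveTask#handleDynParts then passes ltd.isInsertOverwrite() into
      // Hive#loadDynamicPartitions(...), which forwards it through
      // getValidPartitionsInPath(...) to Utilities.getMmDirectoryCandidates(...),
      // where it reaches JavaUtils.IdPathFilter as the new isBaseDir argument;
      // presumably the overwrite output is then resolved against base rather than
      // delta directories. BasicStatsWork#isTargetRewritten() also returns true for
      // such a load, treating it like LoadFileType.REPLACE_ALL for basic stats.
    }
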
for (int i = 0; i < numLB; ++i) { @@ -2169,7 +2169,7 @@ private void constructOneLBLocationMap(FileStatus fSta, public Map, Partition> loadDynamicPartitions(final Path loadPath, final String tableName, final Map partSpec, final LoadFileType loadFileType, final int numDP, final int numLB, final boolean isAcid, final long txnId, final int stmtId, - final boolean hasFollowingStatsTask, final AcidUtils.Operation operation) + final boolean hasFollowingStatsTask, final AcidUtils.Operation operation, boolean isInsertOverwrite) throws HiveException { final Map, Partition> partitionsMap = @@ -2185,7 +2185,7 @@ private void constructOneLBLocationMap(FileStatus fSta, // Get all valid partition paths and existing partitions for them (if any) final Table tbl = getTable(tableName); final Set validPartitions = getValidPartitionsInPath(numDP, numLB, loadPath, txnId, stmtId, - AcidUtils.isInsertOnlyTable(tbl.getParameters())); + AcidUtils.isInsertOnlyTable(tbl.getParameters()), isInsertOverwrite); final int partsToLoad = validPartitions.size(); final AtomicInteger partitionsLoaded = new AtomicInteger(0); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index c41e371abc..7412a14faa 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -7147,6 +7147,10 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) tableDesc.setWriter(fileSinkDesc); } + if (fileSinkDesc.getInsertOverwrite()) { + ltd.setInsertOverwrite(true); + } + if (SessionState.get().isHiveServerQuery() && null != table_desc && table_desc.getSerdeClassName().equalsIgnoreCase(ThriftJDBCBinarySerDe.class.getName()) && diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java index 831ca9f99a..67ae067580 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java @@ -172,7 +172,9 @@ public boolean isTargetRewritten() { return true; } // INSERT OVERWRITE - if (getLoadTableDesc() != null && getLoadTableDesc().getLoadFileType() == LoadFileType.REPLACE_ALL) { + if (getLoadTableDesc() != null && + (getLoadTableDesc().isInsertOverwrite() || + getLoadTableDesc().getLoadFileType() == LoadFileType.REPLACE_ALL)) { return true; } // CREATE TABLE ... 
AS diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java index 66a4aa11be..bb1b131821 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java @@ -40,6 +40,7 @@ //table specs are to be used private int stmtId; private Long currentTransactionId; + private boolean isInsertOverwrite; // TODO: the below seem like they should just be combined into partitionDesc private org.apache.hadoop.hive.ql.plan.TableDesc table; @@ -209,6 +210,10 @@ public void setInheritTableSpecs(boolean inheritTableSpecs) { this.inheritTableSpecs = inheritTableSpecs; } + public boolean isInsertOverwrite() { return this.isInsertOverwrite; } + + public void setInsertOverwrite(boolean v) { this.isInsertOverwrite = v; } + /** * @return the lbCtx */ diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java index 9be3e14911..f5696a1d76 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java @@ -400,7 +400,6 @@ public void testFetchOperatorContextQuoting() throws Exception { @Test public void testFetchOperatorContext() throws Exception { HiveConf conf = new HiveConf(); - conf.set("hive.support.concurrency", "false"); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java index a89e5e0dc0..5ebc480e49 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java @@ -90,7 +90,6 @@ public String redactQuery(String query) { } private static Driver createDriver(HiveConf conf) { - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java index 4d19a715c6..71c2c0d91b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java +++ ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java @@ -49,7 +49,6 @@ @BeforeClass public static void setUpBeforeClass() { conf = new HiveConf(TestQueryHooks.class); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); } diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 976d83d55f..c614a3951e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -136,7 +136,6 @@ public void testCombine() throws Exception { .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK, true); - HiveConf.setBoolVar(hiveConf, 
HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); Driver drv = new Driver(hiveConf); String tblName = "text_symlink_text"; diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 406bdea96a..ff3f7fd8b5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -390,7 +390,6 @@ public void concurrencyFalse() throws Exception { HiveConf badConf = new HiveConf(); badConf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); - badConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); boolean sawException = false; try { TxnManagerFactory.getTxnManagerFactory().getTxnManager(badConf); diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 913b60c353..57c17addc3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -63,7 +63,6 @@ @Before public void setUp() throws Exception { - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, DummyTxnManager.class.getName()); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java index a7a76a42cb..822ff85ce3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java +++ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java @@ -129,7 +129,6 @@ public void testMetrics() throws Exception{ conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "localhost"); conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, String.valueOf(server.getPort())); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); MetricsFactory.init(conf); CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java index 7bda832a3b..078a421979 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java @@ -187,7 +187,6 @@ private static Driver createDriver() { conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS, true); SessionState.start(conf); Driver driver = new Driver(conf); diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java index 79ce2f1769..4aa0dfb944 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java @@ -183,7 +183,6 @@ private static Driver 
createDriver() { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntityDirect.class.getName()); - HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); Driver driver = new Driver(conf); return driver; diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java index 234f9796c9..d9bb6f2ffa 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java @@ -62,8 +62,6 @@ public static void onetimeSetup() throws Exception { "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntity.class.getName()); - HiveConf - .setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); SessionState.start(conf); driver = new Driver(conf); } diff --git ql/src/test/results/clientpositive/acid_table_stats.q.out ql/src/test/results/clientpositive/acid_table_stats.q.out index 05a03d23bf..81ffc52447 100644 --- ql/src/test/results/clientpositive/acid_table_stats.q.out +++ ql/src/test/results/clientpositive/acid_table_stats.q.out @@ -95,7 +95,7 @@ Partition Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -133,9 +133,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: acid - Statistics: Num rows: 1 Data size: 39500 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 39500 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 3980 Basic stats: PARTIAL Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -210,7 +210,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -261,7 +261,7 @@ Partition Parameters: numFiles 2 numRows 1000 rawDataSize 208000 - totalSize 3950 + totalSize 3980 #### A masked pattern was here #### # Storage Information @@ -386,7 +386,7 @@ Partition Parameters: numFiles 4 numRows 1000 rawDataSize 208000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -433,7 +433,7 @@ Partition Parameters: numFiles 4 numRows 2000 rawDataSize 416000 - totalSize 7904 + totalSize 7960 #### A masked pattern was here #### # Storage Information @@ -608,6 +608,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -707,21 +709,55 @@ PREHOOK: type: QUERY POSTHOOK: query: explain select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY STAGE DEPENDENCIES: - Stage-0 is a root stage + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 1000 Data size: 176000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic 
stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Stage: Stage-0 Fetch Operator - limit: 1 + limit: -1 Processor Tree: ListSink PREHOOK: query: select count(*) from acid where ds='2008-04-08' PREHOOK: type: QUERY PREHOOK: Input: default@acid +PREHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### POSTHOOK: query: select count(*) from acid where ds='2008-04-08' POSTHOOK: type: QUERY POSTHOOK: Input: default@acid +POSTHOOK: Input: default@acid@ds=2008-04-08 #### A masked pattern was here #### 1000 diff --git ql/src/test/results/clientpositive/autoColumnStats_4.q.out ql/src/test/results/clientpositive/autoColumnStats_4.q.out index b3df04fc9a..68d7094f54 100644 --- ql/src/test/results/clientpositive/autoColumnStats_4.q.out +++ ql/src/test/results/clientpositive/autoColumnStats_4.q.out @@ -197,7 +197,7 @@ Table Parameters: numFiles 2 numRows 0 rawDataSize 0 - totalSize 1798 + totalSize 1852 transactional true transactional_properties default #### A masked pattern was here #### @@ -241,7 +241,7 @@ Table Parameters: numFiles 4 numRows 0 rawDataSize 0 - totalSize 2909 + totalSize 3033 transactional true transactional_properties default #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/auto_join26.q.out ql/src/test/results/clientpositive/auto_join26.q.out index 91d79857c2..5b2b800fe8 100644 --- ql/src/test/results/clientpositive/auto_join26.q.out +++ ql/src/test/results/clientpositive/auto_join26.q.out @@ -99,31 +99,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 - Select Operator - expressions: _col0 (type: int), _col1 (type: int) - outputColumnNames: key, cnt - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 848 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out index 688fdfa125..3faef6f951 100644 --- 
ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out +++ ql/src/test/results/clientpositive/bucket_map_join_spark4.q.out @@ -206,6 +206,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -229,6 +231,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -256,6 +260,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -279,6 +285,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -306,6 +314,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -329,6 +339,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 @@ -562,6 +574,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -585,6 +599,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl1 @@ -612,6 +628,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -635,6 +653,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl2 @@ -662,6 +682,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -685,6 +707,8 @@ STAGE PLANS: serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 80 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tbl3 diff --git ql/src/test/results/clientpositive/constprog_type.q.out ql/src/test/results/clientpositive/constprog_type.q.out index 27ef1f482b..6dd0d35b86 100644 --- ql/src/test/results/clientpositive/constprog_type.q.out +++ ql/src/test/results/clientpositive/constprog_type.q.out @@ -46,25 +46,24 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + Write Type: INSERT Stage: Stage-7 Conditional Operator Stage: Stage-4 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + micromanaged table: true Stage: Stage-2 Stats Work @@ -95,10 +94,7 @@ STAGE PLANS: name: default.dest1 Stage: Stage-6 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp) diff --git ql/src/test/results/clientpositive/create_like_view.q.out ql/src/test/results/clientpositive/create_like_view.q.out index 25b927bbd3..876df97876 100644 --- ql/src/test/results/clientpositive/create_like_view.q.out +++ ql/src/test/results/clientpositive/create_like_view.q.out @@ -52,6 +52,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -114,6 +116,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -296,6 +300,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/describe_comment_nonascii.q.out ql/src/test/results/clientpositive/describe_comment_nonascii.q.out index a4d512737a..22a34ed641 100644 --- ql/src/test/results/clientpositive/describe_comment_nonascii.q.out +++ ql/src/test/results/clientpositive/describe_comment_nonascii.q.out @@ -54,6 +54,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out index c2226f6249..cf7c3a8ed6 100644 --- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out +++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out @@ -174,6 +174,8 @@ STAGE PLANS: serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 7060 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -195,6 +197,8 @@ STAGE PLANS: 
serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 7060 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.uservisits_web_text_none diff --git ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out index 884e63c44d..ca588138e6 100644 --- ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out +++ ql/src/test/results/clientpositive/dynpart_sort_optimization_acid2.q.out @@ -54,6 +54,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.non_acid + Write Type: INSERT Stage: Stage-0 Move Operator @@ -67,6 +68,8 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.non_acid + Write Type: INSERT + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/fouter_join_ppr.q.out ql/src/test/results/clientpositive/fouter_join_ppr.q.out index 55d2a4d04b..d8bb3c89cc 100644 --- ql/src/test/results/clientpositive/fouter_join_ppr.q.out +++ ql/src/test/results/clientpositive/fouter_join_ppr.q.out @@ -90,6 +90,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -111,6 +113,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -159,6 +163,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -207,6 +213,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -384,6 +392,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -405,6 +415,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -453,6 +465,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + 
transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -501,6 +515,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -682,6 +698,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -703,6 +721,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -751,6 +771,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -799,6 +821,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -976,6 +1000,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -997,6 +1023,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -1045,6 +1073,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1093,6 +1123,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git ql/src/test/results/clientpositive/groupby2_noskew.q.out ql/src/test/results/clientpositive/groupby2_noskew.q.out index 29a71f1710..716cdc008a 100644 --- ql/src/test/results/clientpositive/groupby2_noskew.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew.q.out @@ -55,26 +55,18 @@ STAGE PLANS: output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 - Select Operator - expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string) - outputColumnNames: key, c1, c2 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out index f1ce8388d7..ef1f0e2ca7 100644 --- ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out @@ -56,26 +56,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 - Select Operator - expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int) - outputColumnNames: key, c1, c2, c3, c4 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g2 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out index 9a6457cef2..2d76a19f65 100644 --- ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out +++ ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out @@ -94,21 +94,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write 
Type: INSERT Stage: Stage-0 Move Operator @@ -119,6 +105,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -182,21 +169,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-1 Move Operator @@ -207,6 +180,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 + micromanaged table: true Stage: Stage-7 Map Reduce diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out index bc1d8a9816..605d34ca12 100644 --- ql/src/test/results/clientpositive/groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/groupby_rollup1.q.out @@ -487,31 +487,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int) - outputColumnNames: key1, key2, val - Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key1, 'hll'), compute_stats(key2, 'hll'), compute_stats(val, 'hll') - mode: hash - outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t2 + micromanaged table: true Stage: Stage-4 Stats Work @@ -605,31 +592,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 - Select Operator - expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int) - outputColumnNames: key1, key2, val - Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key1, 'hll'), compute_stats(key2, 'hll'), compute_stats(val, 'hll') - mode: hash - outputColumnNames: _col0, _col1, _col2 - Statistics: Num 
rows: 1 Data size: 1312 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 + micromanaged table: true Stage: Stage-9 Map Reduce diff --git ql/src/test/results/clientpositive/input15.q.out ql/src/test/results/clientpositive/input15.q.out index 13bdbf21e6..a63b510590 100644 --- ql/src/test/results/clientpositive/input15.q.out +++ ql/src/test/results/clientpositive/input15.q.out @@ -17,6 +17,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.TEST15 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/input30.q.out ql/src/test/results/clientpositive/input30.q.out index 84e4ae4786..fdf64c2d56 100644 --- ql/src/test/results/clientpositive/input30.q.out +++ ql/src/test/results/clientpositive/input30.q.out @@ -64,36 +64,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tst_dest30 - Select Operator - expressions: _col0 (type: int) - outputColumnNames: a - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: compute_stats(a, 'hll') - mode: complete - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: _col0 (type: struct) - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tst_dest30 + micromanaged table: true Stage: Stage-2 Stats Work @@ -172,36 +154,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest30 - Select Operator - expressions: _col0 (type: int) - outputColumnNames: a - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: compute_stats(a, 'hll') - mode: complete - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - Select 
Operator - expressions: _col0 (type: struct) - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 424 Basic stats: COMPLETE Column stats: COMPLETE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest30 + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/inputddl1.q.out ql/src/test/results/clientpositive/inputddl1.q.out index 12d655dec5..59f2690082 100644 --- ql/src/test/results/clientpositive/inputddl1.q.out +++ ql/src/test/results/clientpositive/inputddl1.q.out @@ -16,6 +16,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.INPUTDDL1 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/inputddl2.q.out ql/src/test/results/clientpositive/inputddl2.q.out index fbb2ecebf0..c142a50986 100644 --- ql/src/test/results/clientpositive/inputddl2.q.out +++ ql/src/test/results/clientpositive/inputddl2.q.out @@ -17,6 +17,9 @@ STAGE PLANS: partition columns: ds string, country string serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.INPUTDDL2 + table properties: + transactional true + transactional_properties insert_only PREHOOK: query: CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE diff --git ql/src/test/results/clientpositive/llap/autoColumnStats_1.q.out ql/src/test/results/clientpositive/llap/autoColumnStats_1.q.out index f93f666542..dd6c5db5af 100644 --- ql/src/test/results/clientpositive/llap/autoColumnStats_1.q.out +++ ql/src/test/results/clientpositive/llap/autoColumnStats_1.q.out @@ -62,6 +62,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -138,6 +140,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -172,6 +176,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -256,6 +262,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -290,6 +298,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -348,6 +358,8 @@ Table Parameters: numRows 508 rawDataSize 5400 totalSize 5908 + transactional true 
+ transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -400,6 +412,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -1010,6 +1024,8 @@ Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out index 0fda4c1f68..eb483454a6 100644 --- ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out +++ ql/src/test/results/clientpositive/llap/auto_sortmerge_join_12.q.out @@ -213,6 +213,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -279,6 +281,8 @@ STAGE PLANS: serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -362,6 +366,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -410,6 +416,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -481,6 +489,8 @@ STAGE PLANS: serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium diff --git ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out index a88396b69d..6c667d814d 100644 --- ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out +++ ql/src/test/results/clientpositive/llap/auto_sortmerge_join_7.q.out @@ -191,6 +191,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -239,6 +241,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -327,6 +331,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -375,6 +381,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -516,6 +524,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -564,6 +574,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -652,6 +664,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -700,6 +714,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -841,6 +857,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -889,6 +907,8 @@ STAGE PLANS: serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -977,6 +997,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_big @@ -1025,6 +1047,8 @@ STAGE PLANS: serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git ql/src/test/results/clientpositive/llap/bucket3.q.out ql/src/test/results/clientpositive/llap/bucket3.q.out index 9ae5166ded..f38f0826b5 100644 --- ql/src/test/results/clientpositive/llap/bucket3.q.out +++ ql/src/test/results/clientpositive/llap/bucket3.q.out @@ -73,6 +73,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -94,6 +96,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -133,10 +137,13 @@ STAGE PLANS: serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 TotalFiles: 2 + Write Type: INSERT GatherStats: true MultiFileSpray: true Select Operator @@ -203,7 +210,7 @@ STAGE PLANS: tables: partition: ds 1 - replace: true + replace: false #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -222,9 +229,12 @@ STAGE PLANS: serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out index b84edde144..ef4be8cb5a 100644 --- ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out +++ ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out @@ -1778,7 +1778,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 2 Map Operator Tree: TableScan @@ -1809,7 +1809,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out index b3ffda2679..6d07207f18 100644 --- ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out +++ ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out @@ -145,6 +145,8 @@ STAGE PLANS: serialization.ddl struct 
srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -220,6 +222,8 @@ STAGE PLANS: serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 diff --git ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf.q.out ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf.q.out index c5270f76fb..696d32085e 100644 --- ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf.q.out +++ ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf.q.out @@ -47,10 +47,12 @@ POSTHOOK: Input: default@cbo_t1@dt=2014 PREHOOK: query: select f,a,e,b from (select count(*) as a, count(c_int) as b, sum(c_int) as c, avg(c_int) as d, max(c_int) as e, min(c_int) as f from cbo_t1) cbo_t1 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 +PREHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### POSTHOOK: query: select f,a,e,b from (select count(*) as a, count(c_int) as b, sum(c_int) as c, avg(c_int) as d, max(c_int) as e, min(c_int) as f from cbo_t1) cbo_t1 POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 +POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### 1 20 1 18 PREHOOK: query: select f,a,e,b from (select count(*) as a, count(distinct c_int) as b, sum(distinct c_int) as c, avg(distinct c_int) as d, max(distinct c_int) as e, min(distinct c_int) as f from cbo_t1) cbo_t1 diff --git ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf_stats_opt.q.out ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf_stats_opt.q.out index 4cebe36beb..a4f71cb56a 100644 --- ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf_stats_opt.q.out +++ ql/src/test/results/clientpositive/llap/cbo_rp_udf_udaf_stats_opt.q.out @@ -48,10 +48,12 @@ POSTHOOK: Input: default@cbo_t1@dt=2014 PREHOOK: query: select f,a,e,b from (select count(*) as a, count(c_int) as b, sum(c_int) as c, avg(c_int) as d, max(c_int) as e, min(c_int) as f from cbo_t1) cbo_t1 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 +PREHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### POSTHOOK: query: select f,a,e,b from (select count(*) as a, count(c_int) as b, sum(c_int) as c, avg(c_int) as d, max(c_int) as e, min(c_int) as f from cbo_t1) cbo_t1 POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 +POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### 1 20 1 18 PREHOOK: query: select f,a,e,b from (select count(*) as a, count(distinct c_int) as b, sum(distinct c_int) as c, avg(distinct c_int) as d, max(distinct c_int) as e, min(distinct c_int) as f from cbo_t1) cbo_t1 diff --git ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out index 2b87d502a2..1ad99f3904 100644 --- ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out +++ ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out @@ -29,6 +29,8 @@ 
Table Parameters: numRows 0 rawDataSize 0 totalSize 0 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out index d4dd1a6a0c..4030c9b236 100644 --- ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out +++ ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out @@ -94,6 +94,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart_date + Write Type: INSERT Stage: Stage-2 Dependency Collection @@ -106,6 +107,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart_date + table properties: + transactional true + transactional_properties insert_only Stage: Stage-3 Stats Work @@ -116,6 +120,7 @@ STAGE PLANS: files: hdfs directory: true #### A masked pattern was here #### + Write Type: INSERT PREHOOK: query: create table srcpart_date as select ds as ds, ds as `date` from srcpart group by ds PREHOOK: type: CREATETABLE_AS_SELECT @@ -5925,7 +5930,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan diff --git ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_sw.q.out ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_sw.q.out index 81b6bded11..7762767b5e 100644 --- ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_sw.q.out +++ ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_sw.q.out @@ -240,7 +240,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 10 Map Operator Tree: TableScan @@ -260,7 +260,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 9174 Data size: 643900 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 11 Map Operator Tree: TableScan @@ -293,7 +293,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 13 Map Operator Tree: TableScan @@ -313,7 +313,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 9174 Data size: 643900 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 6 Map Operator Tree: TableScan @@ -364,7 +364,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 639 Basic stats: PARTIAL Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 12 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/groupby1.q.out 
ql/src/test/results/clientpositive/llap/groupby1.q.out index d58a9fd69f..05e9090fdf 100644 --- ql/src/test/results/clientpositive/llap/groupby1.q.out +++ ql/src/test/results/clientpositive/llap/groupby1.q.out @@ -80,6 +80,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g1 + Write Type: INSERT Stage: Stage-2 Dependency Collection @@ -87,12 +88,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_g1 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out index 061b0d710e..1cfb36aaed 100644 --- ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out +++ ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out @@ -170,7 +170,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/hybridgrace_hashjoin_1.q.out ql/src/test/results/clientpositive/llap/hybridgrace_hashjoin_1.q.out index 02dc49d13f..107caf678a 100644 --- ql/src/test/results/clientpositive/llap/hybridgrace_hashjoin_1.q.out +++ ql/src/test/results/clientpositive/llap/hybridgrace_hashjoin_1.q.out @@ -67,7 +67,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -86,7 +86,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -188,7 +188,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -207,7 +207,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -307,7 +307,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -326,7 +326,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 9173 Data size: 27396 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -424,7 +424,7 @@ STAGE PLANS: Statistics: Num rows: 1 
Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -443,7 +443,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 9173 Data size: 27396 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -538,7 +538,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -554,7 +554,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -649,7 +649,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -665,7 +665,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1304,7 +1304,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 551 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(20,10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -1322,7 +1322,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 551 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(23,14)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1507,7 +1507,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 551 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(20,10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -1525,7 +1525,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 551 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(23,14)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/insert1.q.out ql/src/test/results/clientpositive/llap/insert1.q.out index 315c660a57..8c580a75a4 100644 --- ql/src/test/results/clientpositive/llap/insert1.q.out +++ ql/src/test/results/clientpositive/llap/insert1.q.out @@ -62,6 +62,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -105,6 +106,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -152,6 +154,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -195,6 +198,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -256,6 +260,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: x.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -299,6 +304,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: x.insert1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -346,6 +352,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -389,6 +396,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -445,6 +453,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -473,6 +482,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: x.insert1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -531,6 +541,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.insert1 + micromanaged table: true Stage: Stage-4 Stats Work @@ -543,12 +554,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: x.insert1 + micromanaged table: true Stage: Stage-5 Stats Work diff --git ql/src/test/results/clientpositive/llap/limit_pushdown.q.out ql/src/test/results/clientpositive/llap/limit_pushdown.q.out index b067e45e2e..5ff00a8ba2 100644 --- ql/src/test/results/clientpositive/llap/limit_pushdown.q.out +++ ql/src/test/results/clientpositive/llap/limit_pushdown.q.out @@ -403,7 +403,7 @@ STAGE PLANS: Statistics: Num rows: 5570 Data size: 33272 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ 
-495,7 +495,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -593,7 +593,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -693,7 +693,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1976458 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out index 470cffe7cc..e9fcf3e1b5 100644 --- ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out +++ ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out @@ -432,7 +432,7 @@ STAGE PLANS: Statistics: Num rows: 5570 Data size: 33272 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -537,7 +537,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -649,7 +649,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -763,7 +763,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1976458 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out index 2bf96d0aae..636def3acf 100644 --- ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out +++ ql/src/test/results/clientpositive/llap/list_bucket_dml_10.q.out @@ -70,10 +70,13 @@ STAGE PLANS: serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part TotalFiles: 1 + Write Type: INSERT GatherStats: true MultiFileSpray: false Select Operator @@ -121,6 +124,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -142,6 +147,8 @@ STAGE PLANS: serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -194,7 +201,7 @@ STAGE PLANS: partition: ds 2008-04-08 hr 11 - replace: true + replace: false #### A masked pattern was here #### table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -212,9 +219,12 @@ STAGE PLANS: serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/llap_partitioned.q.out ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index 350179ed22..690d2b971c 100644 --- ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1675,7 +1675,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1741,7 +1741,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE Target Vertex: Map 1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out index 2c13d5d6f6..96fef078c9 100644 --- ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out +++ ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out @@ -87,7 +87,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -129,7 +129,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -266,7 +266,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -308,7 +308,7 @@ STAGE 
PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/load_dyn_part3.q.out ql/src/test/results/clientpositive/llap/load_dyn_part3.q.out index c9823ce853..5ba54550e1 100644 --- ql/src/test/results/clientpositive/llap/load_dyn_part3.q.out +++ ql/src/test/results/clientpositive/llap/load_dyn_part3.q.out @@ -70,6 +70,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part3 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) outputColumnNames: key, value, ds, hr @@ -118,12 +119,13 @@ STAGE PLANS: partition: ds hr - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part3 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/load_dyn_part5.q.out ql/src/test/results/clientpositive/llap/load_dyn_part5.q.out index 7223808a14..e325421ae2 100644 --- ql/src/test/results/clientpositive/llap/load_dyn_part5.q.out +++ ql/src/test/results/clientpositive/llap/load_dyn_part5.q.out @@ -57,6 +57,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part5 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: key, value @@ -104,12 +105,13 @@ STAGE PLANS: tables: partition: value - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.nzhang_part5 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/mapjoin_decimal.q.out ql/src/test/results/clientpositive/llap/mapjoin_decimal.q.out index 0ffe74e203..0dee5dc886 100644 --- ql/src/test/results/clientpositive/llap/mapjoin_decimal.q.out +++ ql/src/test/results/clientpositive/llap/mapjoin_decimal.q.out @@ -119,7 +119,7 @@ STAGE PLANS: Statistics: Num rows: 1101 Data size: 246624 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(4,0)) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -138,7 +138,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(6,2)) Statistics: Num rows: 1049 Data size: 117488 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/mapjoin_hint.q.out 
ql/src/test/results/clientpositive/llap/mapjoin_hint.q.out index 57aea2cb98..2caa3992e5 100644 --- ql/src/test/results/clientpositive/llap/mapjoin_hint.q.out +++ ql/src/test/results/clientpositive/llap/mapjoin_hint.q.out @@ -158,7 +158,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 2000 Data size: 174000 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -191,7 +191,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 639 Basic stats: PARTIAL Column stats: PARTIAL value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -294,7 +294,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -314,7 +314,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 20 Data size: 1740 Basic stats: PARTIAL Column stats: PARTIAL Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out index b6bc569f19..ee927798db 100644 --- ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out +++ ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out @@ -407,7 +407,7 @@ STAGE PLANS: Statistics: Num rows: 5570 Data size: 33272 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -500,7 +500,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -599,7 +599,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -700,7 +700,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1976458 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out index 90c70ef9a5..a39b120bf0 100644 --- ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out +++ ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out @@ -100,7 +100,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -251,7 +251,7 @@ STAGE PLANS: 
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out index 9f20f77cb0..1393c60891 100644 --- ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out +++ ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out @@ -148,7 +148,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -206,7 +206,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -339,7 +339,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -407,7 +407,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -602,7 +602,7 @@ STAGE PLANS: sort order: ++ Statistics: Num rows: 56 Data size: 5656 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -668,7 +668,7 @@ STAGE PLANS: sort order: ++ Statistics: Num rows: 56 Data size: 5656 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -801,7 +801,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -880,7 +880,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1025,7 +1025,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1125,7 +1125,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/ppd_union_view.q.out ql/src/test/results/clientpositive/llap/ppd_union_view.q.out index b80286b1f0..249952a8be 100644 --- 
ql/src/test/results/clientpositive/llap/ppd_union_view.q.out +++ ql/src/test/results/clientpositive/llap/ppd_union_view.q.out @@ -266,6 +266,8 @@ STAGE PLANS: serialization.ddl struct t1_old { string keymap, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_old @@ -347,6 +349,8 @@ STAGE PLANS: serialization.ddl struct t1_mapping { string key, string keymap} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_mapping @@ -548,6 +552,8 @@ STAGE PLANS: serialization.ddl struct t1_new { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_new diff --git ql/src/test/results/clientpositive/llap/ptf.q.out ql/src/test/results/clientpositive/llap/ptf.q.out index 8836a424ff..7f917bbdba 100644 --- ql/src/test/results/clientpositive/llap/ptf.q.out +++ ql/src/test/results/clientpositive/llap/ptf.q.out @@ -3071,6 +3071,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_4 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: double) outputColumnNames: p_mfgr, p_name, p_size, r, dr, s @@ -3196,6 +3197,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_5 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: double), _col7 (type: int) outputColumnNames: p_mfgr, p_name, p_size, s2, r, dr, cud, fv1 @@ -3231,12 +3233,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_4 + micromanaged table: true Stage: Stage-4 Stats Work @@ -3249,12 +3252,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_5 + micromanaged table: true Stage: Stage-5 Stats Work diff --git ql/src/test/results/clientpositive/llap/skewjoin.q.out ql/src/test/results/clientpositive/llap/skewjoin.q.out index 67c6e21e33..fa24a91786 100644 --- ql/src/test/results/clientpositive/llap/skewjoin.q.out +++ ql/src/test/results/clientpositive/llap/skewjoin.q.out @@ -155,6 +155,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.dest_j1 + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: key, value @@ -190,12 +191,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/smb_mapjoin_6.q.out ql/src/test/results/clientpositive/llap/smb_mapjoin_6.q.out index 57c8ed0189..7bdd520869 100644 --- ql/src/test/results/clientpositive/llap/smb_mapjoin_6.q.out +++ ql/src/test/results/clientpositive/llap/smb_mapjoin_6.q.out @@ -136,6 +136,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string) outputColumnNames: k1, v1, k2, v2 @@ -171,12 +172,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + micromanaged table: true Stage: Stage-3 Stats Work @@ -1352,6 +1354,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string) outputColumnNames: k1, v1, k2, v2 @@ -1387,12 +1390,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + micromanaged table: true Stage: Stage-3 Stats Work @@ -2584,6 +2588,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string) outputColumnNames: k1, v1, k2, v2 @@ -2619,12 +2624,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + micromanaged table: true Stage: Stage-3 Stats Work @@ -2732,6 +2738,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string) outputColumnNames: k1, v1, k2, v2 @@ -2767,12 +2774,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.smb_join_results + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/tez_dml.q.out ql/src/test/results/clientpositive/llap/tez_dml.q.out index 89b74cf569..02d210a97a 100644 --- ql/src/test/results/clientpositive/llap/tez_dml.q.out +++ ql/src/test/results/clientpositive/llap/tez_dml.q.out @@ -70,6 +70,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmp_src + Write Type: INSERT Stage: Stage-2 Dependency Collection @@ -82,6 +83,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmp_src + table properties: + transactional true + transactional_properties insert_only Stage: Stage-3 Stats Work @@ -92,6 +96,7 @@ STAGE PLANS: files: hdfs directory: true #### A masked pattern was here #### + Write Type: INSERT PREHOOK: query: CREATE TABLE tmp_src AS SELECT * FROM (SELECT value, count(value) AS cnt FROM src GROUP BY value) f1 ORDER BY cnt PREHOOK: type: CREATETABLE_AS_SELECT @@ -465,6 +470,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmp_src_part + Write Type: INSERT Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: c, d @@ -518,6 +524,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmp_src_part + micromanaged table: true Stage: Stage-3 Stats Work @@ -934,6 +941,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.even + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: c, d @@ -962,6 +970,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.odd + Write Type: INSERT Select Operator expressions: _col0 (type: int), _col1 (type: string) outputColumnNames: c, d @@ -1020,6 +1029,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.even + micromanaged table: true Stage: Stage-4 Stats Work @@ -1038,6 +1048,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.odd + micromanaged table: true Stage: Stage-5 Stats Work diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out index b63b25fb13..0d52526371 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out @@ -46,7 +46,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 343800 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 
(type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -66,7 +66,7 @@ STAGE PLANS: Statistics: Num rows: 1019 Data size: 256780 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -177,7 +177,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 4080 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -196,7 +196,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1019 Data size: 9144 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -306,7 +306,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 8160 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: smallint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan @@ -325,7 +325,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1019 Data size: 9144 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -457,7 +457,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -477,7 +477,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -591,7 +591,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -610,7 +610,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce 
Operator Tree: @@ -723,7 +723,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan @@ -742,7 +742,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out index 642bda2736..d17abf7b1e 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out @@ -54,7 +54,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -245,7 +245,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -436,7 +436,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out index 4f557d3cc3..1b6ca3a02c 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out @@ -41,7 +41,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -147,7 +147,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), 
_col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan diff --git ql/src/test/results/clientpositive/llap/tez_join_hash.q.out ql/src/test/results/clientpositive/llap/tez_join_hash.q.out index cce289d509..6c0150dc4a 100644 --- ql/src/test/results/clientpositive/llap/tez_join_hash.q.out +++ ql/src/test/results/clientpositive/llap/tez_join_hash.q.out @@ -72,7 +72,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/tez_nway_join.q.out ql/src/test/results/clientpositive/llap/tez_nway_join.q.out index 800f043fff..65aaf53930 100644 --- ql/src/test/results/clientpositive/llap/tez_nway_join.q.out +++ ql/src/test/results/clientpositive/llap/tez_nway_join.q.out @@ -79,7 +79,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -98,7 +98,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -117,7 +117,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -198,7 +198,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -217,7 +217,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -236,7 +236,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -313,7 +313,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -329,7 +329,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan @@ -345,7 +345,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs 
+ LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -444,7 +444,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 3 Map Operator Tree: TableScan @@ -460,7 +460,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -476,7 +476,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/tez_union_group_by.q.out ql/src/test/results/clientpositive/llap/tez_union_group_by.q.out index 79321c1460..832e6cc05c 100644 --- ql/src/test/results/clientpositive/llap/tez_union_group_by.q.out +++ ql/src/test/results/clientpositive/llap/tez_union_group_by.q.out @@ -196,7 +196,7 @@ STAGE PLANS: Map-reduce partition columns: t (type: string), st (type: string) Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE Column stats: NONE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan diff --git ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out index d0a5e62d72..34543d4d35 100644 --- ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out +++ ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out @@ -46,7 +46,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 343800 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -66,7 +66,7 @@ STAGE PLANS: Statistics: Num rows: 1019 Data size: 256780 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -177,7 +177,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 4080 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -196,7 +196,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1019 Data size: 9144 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -306,7 +306,7 @@ STAGE PLANS: 
Statistics: Num rows: 1365 Data size: 8160 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: smallint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan @@ -325,7 +325,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1019 Data size: 9144 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -457,7 +457,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -477,7 +477,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -591,7 +591,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -610,7 +610,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -723,7 +723,7 @@ STAGE PLANS: Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 5 Map Operator Tree: TableScan @@ -742,7 +742,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1365 Data size: 293479 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out index 1ce7a3a37c..7eac58060a 100644 --- ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out +++ ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out @@ -54,7 +54,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: 
timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -245,7 +245,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -436,7 +436,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data size: 1031250 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan diff --git ql/src/test/results/clientpositive/llap/union4.q.out ql/src/test/results/clientpositive/llap/union4.q.out index 747c280c07..6d96d1c8c4 100644 --- ql/src/test/results/clientpositive/llap/union4.q.out +++ ql/src/test/results/clientpositive/llap/union4.q.out @@ -94,6 +94,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: int) outputColumnNames: key, value @@ -146,6 +147,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: int) outputColumnNames: key, value @@ -168,12 +170,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/union6.q.out ql/src/test/results/clientpositive/llap/union6.q.out index 068ec75719..622ea62231 100644 --- ql/src/test/results/clientpositive/llap/union6.q.out +++ ql/src/test/results/clientpositive/llap/union6.q.out @@ -69,6 +69,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: key, value @@ -104,6 +105,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: key, value @@ -141,12 +143,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/union_stats.q.out ql/src/test/results/clientpositive/llap/union_stats.q.out index ba2700c80d..ebfc2d3b32 100644 --- ql/src/test/results/clientpositive/llap/union_stats.q.out +++ ql/src/test/results/clientpositive/llap/union_stats.q.out @@ -44,9 +44,12 @@ STAGE PLANS: name default.t serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t TotalFiles: 1 + Write Type: INSERT GatherStats: true MultiFileSpray: false Execution mode: llap @@ -75,6 +78,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -96,6 +101,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -128,9 +135,12 @@ STAGE PLANS: name default.t serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t TotalFiles: 1 + Write Type: INSERT GatherStats: true MultiFileSpray: false Execution mode: llap @@ -159,6 +169,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -180,6 +192,8 @@ STAGE PLANS: serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src @@ -200,6 +214,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t + table properties: + transactional true + transactional_properties insert_only Stage: Stage-3 Stats Work @@ -211,6 +228,7 @@ STAGE PLANS: files: hdfs directory: true #### A masked pattern was here #### + Write Type: INSERT PREHOOK: query: create table t as select * from src union all select * from src PREHOOK: type: CREATETABLE_AS_SELECT @@ -255,6 +273,8 @@ Table Parameters: numRows 1000 rawDataSize 10624 totalSize 11624 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -303,6 +323,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -359,6 +381,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties 
insert_only #### A masked pattern was here #### # Storage Information @@ -427,6 +451,8 @@ Table Parameters: numRows 1000 rawDataSize 10624 totalSize 11624 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -461,6 +487,8 @@ Table Parameters: numRows 1000 rawDataSize 10624 totalSize 11624 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out index ae58464a90..737912e915 100644 --- ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out +++ ql/src/test/results/clientpositive/llap/vector_adaptor_usage_mode.q.out @@ -141,7 +141,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -219,7 +219,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -297,7 +297,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -375,7 +375,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -466,7 +466,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -560,7 +560,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -628,7 +628,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -735,7 +735,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: 
enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -800,7 +800,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -907,7 +907,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1001,7 +1001,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1127,7 +1127,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out index 476670bb14..99050c680c 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out @@ -162,7 +162,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -293,7 +293,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), _col3 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -424,7 +424,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 240 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: double), _col3 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out index 300e07bc39..3ea26bfe54 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out @@ -99,7 +99,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column 
stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out index 54216fa6bc..8c8ae4ded5 100644 --- ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out @@ -1263,7 +1263,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1306,7 +1306,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1908,6 +1908,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + Write Type: INSERT File Output Operator compressed: false Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE @@ -1916,6 +1917,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 + Write Type: INSERT Execution mode: llap Stage: Stage-3 @@ -1924,12 +1926,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + micromanaged table: true Stage: Stage-4 Stats Work @@ -1938,12 +1941,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 + micromanaged table: true Stage: Stage-5 Stats Work @@ -2123,6 +2127,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + Write Type: INSERT Select Operator expressions: _col0 (type: int) outputColumnNames: _col0 @@ -2182,6 +2187,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 + Write Type: 
INSERT Stage: Stage-3 Dependency Collection @@ -2189,12 +2195,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + micromanaged table: true Stage: Stage-4 Stats Work @@ -2203,12 +2210,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 + micromanaged table: true Stage: Stage-5 Stats Work diff --git ql/src/test/results/clientpositive/llap/vector_between_columns.q.out ql/src/test/results/clientpositive/llap/vector_between_columns.q.out index 48d5275fb0..9ef82c57a5 100644 --- ql/src/test/results/clientpositive/llap/vector_between_columns.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_columns.q.out @@ -108,7 +108,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -142,7 +142,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: smallint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -266,7 +266,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -300,7 +300,7 @@ STAGE PLANS: Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: smallint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_between_in.q.out ql/src/test/results/clientpositive/llap/vector_between_in.q.out index e02f64cb8b..eee0bcd951 100644 --- ql/src/test/results/clientpositive/llap/vector_between_in.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_in.q.out @@ -63,7 +63,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 10 Data size: 532 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -168,7 +168,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 64 Basic stats: 
COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -267,7 +267,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 15 Data size: 1596 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -372,7 +372,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -471,7 +471,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1365 Data size: 72626 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -566,7 +566,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 10923 Data size: 581173 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -661,7 +661,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1365 Data size: 145253 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -766,7 +766,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1121,7 +1121,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 653800 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -1259,7 +1259,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1307600 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1397,7 +1397,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 653800 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1535,7 +1535,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 1307600 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index aabfc732a9..f74e20e4fa 100644 --- ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -189,7 +189,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -232,7 +232,7 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 34084 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -387,7 +387,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 3200 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -590,7 +590,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -633,7 +633,7 @@ STAGE PLANS: Statistics: Num rows: 100 Data size: 6000 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_bround.q.out 
ql/src/test/results/clientpositive/llap/vector_bround.q.out
index 5ef8695f20..f37327d0a7 100644
--- ql/src/test/results/clientpositive/llap/vector_bround.q.out
+++ ql/src/test/results/clientpositive/llap/vector_bround.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
diff --git ql/src/test/results/clientpositive/llap/vector_bucket.q.out ql/src/test/results/clientpositive/llap/vector_bucket.q.out
index 146014f19f..dc0ee764ec 100644
--- ql/src/test/results/clientpositive/llap/vector_bucket.q.out
+++ ql/src/test/results/clientpositive/llap/vector_bucket.q.out
@@ -95,6 +95,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.non_orc_table
+ Write Type: INSERT
 Stage: Stage-2
 Dependency Collection
@@ -108,6 +109,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.non_orc_table
+ micromanaged table: true
 Stage: Stage-3
 Stats Work
diff --git ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
index 22b772279b..29a9d5d144 100644
--- ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
+++ ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
@@ -167,7 +167,7 @@ STAGE PLANS:
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
diff --git ql/src/test/results/clientpositive/llap/vector_char_2.q.out ql/src/test/results/clientpositive/llap/vector_char_2.q.out
index 827ec2e9c7..94a8e44f6c 100644
--- ql/src/test/results/clientpositive/llap/vector_char_2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_char_2.q.out
@@ -118,7 +118,7 @@ STAGE PLANS:
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: bigint), _col2 (type: bigint)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
@@ -314,7 +314,7 @@ STAGE PLANS:
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: bigint), _col2 (type: bigint)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
diff --git ql/src/test/results/clientpositive/llap/vector_char_4.q.out ql/src/test/results/clientpositive/llap/vector_char_4.q.out
index ba704350e7..b52ef62233 100644
--- ql/src/test/results/clientpositive/llap/vector_char_4.q.out
+++ ql/src/test/results/clientpositive/llap/vector_char_4.q.out
@@ -169,8 +169,9 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe
 name: default.char_lazy_binary_columnar
+ Write Type: INSERT
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+
LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -187,12 +188,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe name: default.char_lazy_binary_columnar + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out index 72cd1d3dde..6f4fce8a06 100644 --- ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out @@ -191,7 +191,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -234,7 +234,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -345,7 +345,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -401,7 +401,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(20)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -515,7 +515,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: char(10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -571,7 +571,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_char_simple.q.out ql/src/test/results/clientpositive/llap/vector_char_simple.q.out index 696359baab..ba0f00bb30 100644 --- ql/src/test/results/clientpositive/llap/vector_char_simple.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_simple.q.out @@ -71,7 +71,7 @@ STAGE PLANS: Vertices: Map 1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be 
used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -157,7 +157,7 @@ STAGE PLANS: Vertices: Map 1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -255,7 +255,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_coalesce.q.out ql/src/test/results/clientpositive/llap/vector_coalesce.q.out index 339df62adc..0c2d5dd12a 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce.q.out +++ ql/src/test/results/clientpositive/llap/vector_coalesce.q.out @@ -42,7 +42,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -150,7 +150,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -258,7 +258,7 @@ STAGE PLANS: className: VectorFileSinkOperator native: false Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -342,7 +342,7 @@ STAGE PLANS: native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -445,7 +445,7 @@ STAGE PLANS: className: VectorFileSinkOperator native: false Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -527,7 +527,7 @@ STAGE PLANS: className: VectorFileSinkOperator native: false Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git 
ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
index 0f16d60d3b..200a0e9e52 100644
--- ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_coalesce_2.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
 Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col1 (type: bigint)
 Execution mode: llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Reducer 2
 Execution mode: llap
 Reduce Operator Tree:
@@ -146,7 +146,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Stage: Stage-0
 Fetch Operator
@@ -237,7 +237,7 @@ STAGE PLANS:
 Statistics: Num rows: 2 Data size: 186 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col1 (type: bigint)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
@@ -360,7 +360,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
diff --git ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out
index de42651b67..76415e29f6 100644
--- ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out
+++ ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map 2
 Map Operator Tree:
 TableScan
@@ -97,7 +97,7 @@ STAGE PLANS:
 Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col1 (type: bigint)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Stage: Stage-0
 Fetch Operator
diff --git ql/src/test/results/clientpositive/llap/vector_complex_join.q.out ql/src/test/results/clientpositive/llap/vector_complex_join.q.out
index 3eb269b4c9..ddb43b8a4a 100644
--- ql/src/test/results/clientpositive/llap/vector_complex_join.q.out
+++ ql/src/test/results/clientpositive/llap/vector_complex_join.q.out
@@ -87,7 +87,7 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
@@ -130,7 +130,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 744 Basic stats: COMPLETE Column stats: NONE
 value expressions: _col1 (type: map)
 Execution mode: vectorized, llap
- LLAP IO: all inputs
+ LLAP IO: may be used (ACID table)
 Map Vectorization:
 enabled: true
 enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
@@ -264,7 +264,7 @@ STAGE PLANS:
 output format:
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -300,7 +300,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE value expressions: a (type: array), index (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -399,7 +399,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -435,7 +435,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 124 Basic stats: COMPLETE Column stats: NONE value expressions: a (type: array), index (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_count.q.out ql/src/test/results/clientpositive/llap/vector_count.q.out index 400d930969..7d60de89dc 100644 --- ql/src/test/results/clientpositive/llap/vector_count.q.out +++ ql/src/test/results/clientpositive/llap/vector_count.q.out @@ -104,7 +104,7 @@ STAGE PLANS: Statistics: Num rows: 7 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col5 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -195,7 +195,7 @@ STAGE PLANS: Statistics: Num rows: 7 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col4 (type: bigint), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -289,7 +289,7 @@ STAGE PLANS: Statistics: Num rows: 7 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: d (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -384,7 +384,7 @@ STAGE PLANS: nativeConditionsNotMet: No DISTINCT columns IS false Statistics: Num rows: 7 Data size: 112 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out index 90086ea529..d75f146a08 100644 --- ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out +++ ql/src/test/results/clientpositive/llap/vector_count_distinct.q.out @@ -1285,7 +1285,7 @@ STAGE PLANS: 
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 169 Data size: 676 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_data_types.q.out ql/src/test/results/clientpositive/llap/vector_data_types.q.out index 06b50bb461..767e6af242 100644 --- ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -131,7 +131,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -241,7 +241,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_date_1.q.out ql/src/test/results/clientpositive/llap/vector_date_1.q.out index 1e3d2b3a28..892af34350 100644 --- ql/src/test/results/clientpositive/llap/vector_date_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_date_1.q.out @@ -101,7 +101,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -211,7 +211,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -321,7 +321,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -431,7 +431,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE 
Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -542,7 +542,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: @@ -653,7 +653,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -746,7 +746,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out index 4f1b509c54..221d8c9c3c 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out @@ -82,7 +82,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -198,7 +198,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -314,7 +314,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -430,7 +430,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -546,7 +546,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -662,7 +662,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: 
true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -778,7 +778,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -894,7 +894,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1010,7 +1010,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 51c0854e43..26d4514ddf 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -82,7 +82,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 32e2088593..51a00d5ced 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -95,7 +95,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 2661900 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -274,7 +274,7 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 2661900 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: struct), _col13 (type: struct), _col14 (type: struct), _col15 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out index 4d86734094..1648c6956e 100644 --- 
ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out @@ -56,7 +56,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out index d63eeb7bf0..89d4489385 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out @@ -73,7 +73,7 @@ STAGE PLANS: Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -233,7 +233,7 @@ STAGE PLANS: Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out index aca8dc0138..b0976d54cc 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out @@ -148,7 +148,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -199,7 +199,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -412,7 +412,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -464,7 +464,7 @@ STAGE PLANS: Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(24,0)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out index 270b6349e6..2e37896e85 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_decimal_math_funcs.q.out @@ -134,7 +134,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -380,7 +380,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 2fc5277fd6..f6d7b2a6bf 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -608,7 +608,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 400 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: decimal(30,10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 523d59ff8d..286edf6e76 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -601,7 +601,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -718,7 +718,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out index b6eac0c670..1336760a41 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(22,1)), _col3 (type: decimal(23,2)), _col4 (type: decimal(24,3)), _col5 (type: decimal(21,0)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(21,0)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -249,7 +249,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(22,1)), _col3 (type: decimal(23,2)), _col4 (type: decimal(24,3)), _col5 (type: decimal(25,4)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(22,1)), _col13 (type: decimal(23,2)), _col14 (type: decimal(24,3)), _col15 (type: decimal(25,4)), _col16 (type: decimal(21,0)), _col17 (type: decimal(21,0)), _col18 (type: decimal(21,0)), _col19 (type: decimal(21,0)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -442,7 +442,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 3808 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(21,0)), _col3 (type: decimal(21,0)), _col4 (type: decimal(21,0)), _col5 (type: decimal(21,0)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(21,0)), _col13 (type: decimal(21,0)), _col14 (type: decimal(21,0)), _col15 (type: decimal(21,0)), _col16 (type: decimal(21,0)), _col17 (type: decimal(22,1)), _col18 (type: decimal(23,2)), _col19 (type: decimal(24,3)), _col20 (type: decimal(25,4)), _col21 (type: decimal(26,5)), _col22 (type: decimal(27,6)), _col23 (type: decimal(28,7)), _col24 (type: decimal(29,8)), _col25 (type: decimal(30,9)), _col26 (type: decimal(31,10)), _col27 (type: decimal(32,11)), _col28 (type: decimal(33,12)), _col29 (type: decimal(34,13)), _col31 (type: decimal(35,14)), _col32 (type: decimal(36,15)), _col33 (type: decimal(37,16)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -624,7 +624,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(30,9)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index dedad2422f..30d66b3d75 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -115,7 +115,7 @@ STAGE PLANS: Statistics: Num rows: 30 Data size: 4936 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(10,4)), _col2 (type: decimal(15,8)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out index 90f68f5bba..362e7859f3 100644 --- 
ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf2.q.out @@ -104,7 +104,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -200,7 +200,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out index 73d04a975c..8da66f03a5 100644 --- ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_distinct_2.q.out @@ -160,7 +160,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 2000 Data size: 357388 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_elt.q.out ql/src/test/results/clientpositive/llap/vector_elt.q.out index 24a1a65022..ce94d94b27 100644 --- ql/src/test/results/clientpositive/llap/vector_elt.q.out +++ ql/src/test/results/clientpositive/llap/vector_elt.q.out @@ -59,7 +59,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -170,7 +170,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out index 127d8ada06..4c6e36550d 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_3.q.out @@ -163,7 +163,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 372596 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out index 7224d5994b..5bc6669769 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out @@ -90,7 +90,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -249,7 +249,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -408,7 +408,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -561,7 +561,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -714,7 +714,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -874,7 +874,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out index 24c7ce7f76..b4cf60c01b 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out @@ -95,7 +95,7 @@ STAGE PLANS: Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -297,7 +297,7 @@ STAGE PLANS: Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -508,7 +508,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -815,7 +815,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1131,7 +1131,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1500,7 +1500,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1849,7 +1849,7 @@ STAGE PLANS: Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2014,7 +2014,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2243,7 +2243,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out index da4b81f369..8e7af7f76b 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -109,7 +109,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -277,7 +277,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index d2b738bc32..78c315fa77 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -109,7 +109,7 
@@ STAGE PLANS: Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -271,7 +271,7 @@ STAGE PLANS: Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -433,7 +433,7 @@ STAGE PLANS: Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -595,7 +595,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -750,7 +750,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 9936 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -906,7 +906,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1048,7 +1048,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index 1877bba0c6..a2b968dd2a 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -95,7 +95,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -276,7 +276,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -460,7 +460,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 3312 Basic 
stats: COMPLETE Column stats: NONE value expressions: _col2 (type: double) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -680,7 +680,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 534 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out index 7c9f668d51..06e6843484 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out @@ -81,7 +81,7 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 26496 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: struct), _col4 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -189,7 +189,7 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 26496 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: struct), _col4 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -323,7 +323,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 6624 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: struct), _col3 (type: bigint) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index 957bc22436..504aa2f118 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -115,7 +115,7 @@ STAGE PLANS: Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -359,7 +359,7 @@ STAGE PLANS: Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -635,7 +635,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index 4d8fa164fe..a94c52e790 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -94,7 +94,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -286,7 +286,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -505,7 +505,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out index 5e9e20471d..a54a8da993 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out @@ -94,7 +94,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -237,7 +237,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out index b81a0d372c..333f53e749 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out @@ -94,7 +94,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -255,7 +255,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -423,7 +423,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE 
Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -592,7 +592,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -798,7 +798,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -959,7 +959,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1134,7 +1134,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1296,7 +1296,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1496,7 +1496,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1662,7 +1662,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1828,7 +1828,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1989,7 +1989,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out index e8ca06ef81..976e6b8053 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out @@ -96,7 +96,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -297,7 +297,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -498,7 +498,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -696,7 +696,7 @@ STAGE PLANS: Statistics: Num rows: 18 Data size: 9936 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -891,7 +891,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1076,7 +1076,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out index 4de6ebbff2..dc465ce477 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out @@ -93,7 +93,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out index 0da9a4560a..092350f4da 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_reduce.q.out @@ -289,7 +289,7 @@ STAGE PLANS: Statistics: Num rows: 82 Data size: 328 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -487,7 +487,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No 
DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 82 Data size: 328 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -780,7 +780,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 768 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: double), _col3 (type: decimal(38,18)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1004,7 +1004,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 66000 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: double), _col4 (type: decimal(38,18)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out index 0fc4b0661d..3105d02f3c 100644 --- ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out +++ ql/src/test/results/clientpositive/llap/vector_grouping_sets.q.out @@ -186,7 +186,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 24 Data size: 4416 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -317,7 +317,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 24 Data size: 4416 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -434,7 +434,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: int) Statistics: Num rows: 24 Data size: 4416 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: vectorized, llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/vector_if_expr.q.out ql/src/test/results/clientpositive/llap/vector_if_expr.q.out index fb5cdb526d..49a3fefb3e 100644 --- ql/src/test/results/clientpositive/llap/vector_if_expr.q.out +++ ql/src/test/results/clientpositive/llap/vector_if_expr.q.out @@ -53,7 +53,7 @@ STAGE PLANS: Statistics: Num rows: 3030 Data size: 566572 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 
Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out index 4a3bc02276..b512de7a78 100644 --- ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out +++ ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out @@ -207,7 +207,7 @@ STAGE PLANS: Statistics: Num rows: 200 Data size: 35908 Basic stats: COMPLETE Column stats: NONE value expressions: cd_demo_sk (type: int), cd_marital_status (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -234,7 +234,7 @@ STAGE PLANS: Statistics: Num rows: 1000 Data size: 3804 Basic stats: COMPLETE Column stats: NONE value expressions: ss_cdemo_sk (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_inner_join.q.out ql/src/test/results/clientpositive/llap/vector_inner_join.q.out index 438ffd9f5d..184943deab 100644 --- ql/src/test/results/clientpositive/llap/vector_inner_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_inner_join.q.out @@ -109,7 +109,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -160,7 +160,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -268,7 +268,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -331,7 +331,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -479,7 +479,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -531,7 +531,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -623,7 +623,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -692,7 +692,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -810,7 +810,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -862,7 +862,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -979,7 +979,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1031,7 +1031,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1148,7 +1148,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1200,7 +1200,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1292,7 +1292,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1369,7 +1369,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: 
enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1461,7 +1461,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1538,7 +1538,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_interval_1.q.out ql/src/test/results/clientpositive/llap/vector_interval_1.q.out index 1be72326b3..bc5a4f86a5 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_1.q.out @@ -94,7 +94,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_year_month), _col2 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -221,7 +221,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 480 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_year_month), _col2 (type: interval_year_month), _col3 (type: interval_year_month), _col4 (type: interval_year_month) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -356,7 +356,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 480 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -503,7 +503,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 848 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -661,7 +661,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 816 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be 
used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -801,7 +801,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -923,7 +923,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1051,7 +1051,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time), _col5 (type: interval_day_time), _col6 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_interval_2.q.out ql/src/test/results/clientpositive/llap/vector_interval_2.q.out index 7548686c50..80270d991d 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_2.q.out @@ -148,7 +148,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col12 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean), _col18 (type: boolean), _col19 (type: boolean), _col20 (type: boolean), _col21 (type: boolean), _col22 (type: boolean), _col23 (type: boolean), _col24 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -354,7 +354,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -560,7 +560,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), 
_col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col12 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean), _col18 (type: boolean), _col19 (type: boolean), _col20 (type: boolean), _col21 (type: boolean), _col22 (type: boolean), _col23 (type: boolean), _col24 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -766,7 +766,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -963,7 +963,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 408 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1157,7 +1157,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 408 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1341,7 +1341,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 280 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1525,7 +1525,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may 
be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1719,7 +1719,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1913,7 +1913,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out index f2a4d3a855..01b8aea885 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out @@ -102,7 +102,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 2744 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -284,7 +284,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 2744 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -466,7 +466,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 1960 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -650,7 +650,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -755,7 +755,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 2744 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: 
timestamp), _col5 (type: timestamp), _col6 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -939,7 +939,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 4704 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1123,7 +1123,7 @@ STAGE PLANS: Statistics: Num rows: 50 Data size: 1960 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1305,7 +1305,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out index cfe3d5f253..c9b1c069cf 100644 --- ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out @@ -252,7 +252,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -295,7 +295,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 850 Data size: 226338 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_join30.q.out ql/src/test/results/clientpositive/llap/vector_join30.q.out index 7c34aff5f3..c7bd21bb1f 100644 --- ql/src/test/results/clientpositive/llap/vector_join30.q.out +++ ql/src/test/results/clientpositive/llap/vector_join30.q.out @@ -75,7 +75,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP 
IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -142,7 +142,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -292,7 +292,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -328,7 +328,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -453,7 +453,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -513,7 +513,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -651,7 +651,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -724,7 +724,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -766,7 +766,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -903,7 +903,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -939,7 +939,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -974,7 +974,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1133,7 +1133,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1169,7 +1169,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1204,7 +1204,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1363,7 +1363,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1399,7 +1399,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1434,7 +1434,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1593,7 +1593,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1629,7 +1629,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1664,7 +1664,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out index dc8f47eeae..4e627c6af5 100644 --- ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_left_outer_join.q.out @@ -71,7 +71,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -96,7 +96,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ 
-121,7 +121,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Statistics: Num rows: 12288 Data size: 36696 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out index a5a3670742..c1c126d55b 100644 --- ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out @@ -132,7 +132,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 2 Map Operator Tree: TableScan @@ -149,7 +149,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -226,7 +226,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 2 Map Operator Tree: TableScan @@ -243,7 +243,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -340,7 +340,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -376,7 +376,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -482,7 +482,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -518,7 +518,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -621,7 +621,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -657,7 +657,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -760,7 +760,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -796,7 +796,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out index 7c1cbb619a..f757d4b554 100644 --- ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out +++ ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out @@ -296,7 +296,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -325,7 +325,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_null_projection.q.out ql/src/test/results/clientpositive/llap/vector_null_projection.q.out index 8c60363b19..20a6acdd77 100644 --- ql/src/test/results/clientpositive/llap/vector_null_projection.q.out +++ ql/src/test/results/clientpositive/llap/vector_null_projection.q.out @@ -64,7 +64,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -131,7 +131,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: void) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -158,7 +158,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: void) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out index e231bea7b4..131a9df885 100644 --- ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out +++ ql/src/test/results/clientpositive/llap/vector_number_compare_projection.q.out @@ -158,7 +158,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -284,7 +284,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_nvl.q.out ql/src/test/results/clientpositive/llap/vector_nvl.q.out index 837a574b1b..4873f258a0 100644 --- ql/src/test/results/clientpositive/llap/vector_nvl.q.out +++ ql/src/test/results/clientpositive/llap/vector_nvl.q.out @@ -61,7 +61,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -156,7 +156,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -249,7 +249,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -329,7 +329,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out index e4bc4f08f9..098f1ce481 100644 --- ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out +++ ql/src/test/results/clientpositive/llap/vector_orderby_5.q.out @@ -164,7 +164,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 22812 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git 
ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out index a6e7e709f8..7eaf831bf7 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out @@ -127,7 +127,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -172,7 +172,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 554 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -262,7 +262,7 @@ STAGE PLANS: Statistics: Num rows: 6 Data size: 554 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -325,7 +325,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out index c0885c759a..4305a786d2 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out @@ -291,7 +291,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -336,7 +336,7 @@ STAGE PLANS: Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -465,7 +465,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -509,7 +509,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all 
inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -761,7 +761,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -805,7 +805,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -849,7 +849,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out index e0f429e1e3..80fc0d5d0a 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out @@ -336,7 +336,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -380,7 +380,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 20 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -424,7 +424,7 @@ STAGE PLANS: valueColumnNums: [] Statistics: Num rows: 20 Data size: 88 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out index 068453f068..be1b62cc62 100644 --- ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out +++ ql/src/test/results/clientpositive/llap/vector_partition_diff_num_cols.q.out @@ -130,7 +130,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -300,7 +300,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true 
enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -470,7 +470,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -627,7 +627,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: PARTIAL value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -784,7 +784,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out index 4cfed7a1da..eefe4837fe 100644 --- ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out +++ ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out @@ -293,7 +293,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: timestamp), _col4 (type: float) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -471,7 +471,7 @@ STAGE PLANS: Statistics: Num rows: 137 Data size: 7392 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -965,7 +965,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1200,7 +1200,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: timestamp), _col3 (type: float) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1402,7 +1402,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 768 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1920,7 +1920,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) 
Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2155,7 +2155,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: date), _col3 (type: float) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2357,7 +2357,7 @@ STAGE PLANS: Statistics: Num rows: 12 Data size: 576 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_reduce1.q.out ql/src/test/results/clientpositive/llap/vector_reduce1.q.out index c4a6ae69fd..55a8d46a30 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce1.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce1.q.out @@ -147,7 +147,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 2000 Data size: 15208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_reduce2.q.out ql/src/test/results/clientpositive/llap/vector_reduce2.q.out index 03c8bc8151..87cfebd49f 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce2.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce2.q.out @@ -147,7 +147,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 2000 Data size: 707172 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_reduce3.q.out ql/src/test/results/clientpositive/llap/vector_reduce3.q.out index a6a1f56431..d3089d271d 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce3.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce3.q.out @@ -147,7 +147,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 2000 Data size: 349784 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out 
ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out index 3f92327338..fedfc98f34 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_decimal.q.out @@ -85,7 +85,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col4 (type: decimal(20,10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_string_concat.q.out ql/src/test/results/clientpositive/llap/vector_string_concat.q.out index 2ceef58012..713f93346a 100644 --- ql/src/test/results/clientpositive/llap/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/llap/vector_string_concat.q.out @@ -151,7 +151,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -375,7 +375,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 106456 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_struct_in.q.out ql/src/test/results/clientpositive/llap/vector_struct_in.q.out index c2b3af7551..b4b6b19603 100644 --- ql/src/test/results/clientpositive/llap/vector_struct_in.q.out +++ ql/src/test/results/clientpositive/llap/vector_struct_in.q.out @@ -88,7 +88,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -205,7 +205,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -344,7 +344,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -461,7 +461,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -600,7 +600,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, 
llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -717,7 +717,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -859,7 +859,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -979,7 +979,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_udf_octet_length.q.out ql/src/test/results/clientpositive/llap/vector_udf_octet_length.q.out index 5b98553aa5..bc42be4618 100644 --- ql/src/test/results/clientpositive/llap/vector_udf_octet_length.q.out +++ ql/src/test/results/clientpositive/llap/vector_udf_octet_length.q.out @@ -53,6 +53,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + Write Type: INSERT Execution mode: vectorized, llap LLAP IO: no inputs @@ -62,12 +63,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 + micromanaged table: true Stage: Stage-3 Stats Work @@ -187,7 +189,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out index c04cc614a9..31e0e07985 100644 --- ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out +++ ql/src/test/results/clientpositive/llap/vector_varchar_4.q.out @@ -169,8 +169,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe name: default.varchar_lazy_binary_columnar + Write Type: INSERT Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -187,12 +188,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe name: 
default.varchar_lazy_binary_columnar + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out index 117246e57c..c7b2169aaf 100644 --- ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vector_varchar_mapjoin1.q.out @@ -173,7 +173,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -202,7 +202,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -290,7 +290,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -328,7 +328,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(20)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -418,7 +418,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: varchar(10)) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -456,7 +456,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out index 181ab50408..01ff16f295 100644 --- ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out +++ ql/src/test/results/clientpositive/llap/vector_when_case_null.q.out @@ -77,7 +77,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_windowing.q.out ql/src/test/results/clientpositive/llap/vector_windowing.q.out index 8dfee97acd..69d541b4d8 100644 --- 
ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -5450,6 +5450,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_1 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: double) outputColumnNames: p_mfgr, p_name, p_size, r, dr, s @@ -5626,6 +5627,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_2 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: double), _col7 (type: int) outputColumnNames: p_mfgr, p_name, p_size, r, dr, cud, s2, fv1 @@ -5787,6 +5789,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_3 + Write Type: INSERT Select Operator expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int) outputColumnNames: p_mfgr, p_name, p_size, c, ca, fv @@ -5807,12 +5810,13 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_1 + micromanaged table: true Stage: Stage-5 Stats Work @@ -5825,12 +5829,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_2 + micromanaged table: true Stage: Stage-6 Stats Work @@ -5843,12 +5848,13 @@ STAGE PLANS: Stage: Stage-2 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_3 + micromanaged table: true Stage: Stage-7 Stats Work diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out index 74997d3ac2..3e21bf3b6b 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out @@ -118,7 +118,7 @@ STAGE PLANS: Statistics: Num rows: 9174 Data size: 671296 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index c80fefa04b..e45c710f8e 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ 
-961,7 +961,7 @@ STAGE PLANS: Statistics: Num rows: 9174 Data size: 671296 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_1.q.out ql/src/test/results/clientpositive/llap/vectorization_1.q.out index 278bd0c9ac..e02ddc58db 100644 --- ql/src/test/results/clientpositive/llap/vectorization_1.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_1.q.out @@ -102,7 +102,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: tinyint), _col3 (type: int), _col4 (type: struct), _col5 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_10.q.out ql/src/test/results/clientpositive/llap/vectorization_10.q.out index b6c68fbedd..8c9f50b371 100644 --- ql/src/test/results/clientpositive/llap/vectorization_10.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_10.q.out @@ -94,7 +94,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_11.q.out ql/src/test/results/clientpositive/llap/vectorization_11.q.out index bb0feecc73..af9465eb85 100644 --- ql/src/test/results/clientpositive/llap/vectorization_11.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_11.q.out @@ -76,7 +76,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_12.q.out ql/src/test/results/clientpositive/llap/vectorization_12.q.out index 1285b25c16..2dc67de24a 100644 --- ql/src/test/results/clientpositive/llap/vectorization_12.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_12.q.out @@ -129,7 +129,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 370 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col4 (type: bigint), _col5 (type: struct), _col6 (type: struct), _col7 (type: bigint), _col8 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_13.q.out ql/src/test/results/clientpositive/llap/vectorization_13.q.out index e50f3e2d6b..e035e343ff 100644 --- ql/src/test/results/clientpositive/llap/vectorization_13.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_13.q.out @@ -131,7 +131,7 @@ STAGE PLANS: 
Statistics: Num rows: 2730 Data size: 816734 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: tinyint), _col6 (type: double), _col7 (type: struct), _col8 (type: struct), _col9 (type: float), _col10 (type: tinyint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -482,7 +482,7 @@ STAGE PLANS: Statistics: Num rows: 2730 Data size: 816734 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: tinyint), _col6 (type: double), _col7 (type: struct), _col8 (type: struct), _col9 (type: float), _col10 (type: tinyint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_14.q.out ql/src/test/results/clientpositive/llap/vectorization_14.q.out index 0f77070cd0..895fed2adc 100644 --- ql/src/test/results/clientpositive/llap/vectorization_14.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_14.q.out @@ -132,7 +132,7 @@ STAGE PLANS: Statistics: Num rows: 303 Data size: 137686 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: struct), _col6 (type: float), _col7 (type: struct), _col8 (type: bigint), _col9 (type: struct), _col10 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_15.q.out ql/src/test/results/clientpositive/llap/vectorization_15.q.out index ae4fe18ffb..8504de567c 100644 --- ql/src/test/results/clientpositive/llap/vectorization_15.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_15.q.out @@ -127,7 +127,7 @@ STAGE PLANS: Statistics: Num rows: 6144 Data size: 3293884 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col7 (type: struct), _col8 (type: double), _col9 (type: struct), _col10 (type: struct), _col11 (type: struct), _col12 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_16.q.out ql/src/test/results/clientpositive/llap/vectorization_16.q.out index 7cf60ae514..ebbdc88edb 100644 --- ql/src/test/results/clientpositive/llap/vectorization_16.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_16.q.out @@ -104,7 +104,7 @@ STAGE PLANS: Statistics: Num rows: 2048 Data size: 434588 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: bigint), _col4 (type: struct), _col5 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_17.q.out ql/src/test/results/clientpositive/llap/vectorization_17.q.out index bdcb1ebfee..9d20383da7 100644 --- ql/src/test/results/clientpositive/llap/vectorization_17.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_17.q.out @@ -98,7 +98,7 @@ STAGE PLANS: Statistics: Num rows: 4096 Data 
size: 1212930 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: timestamp), _col4 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: decimal(11,4)), _col13 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_2.q.out ql/src/test/results/clientpositive/llap/vectorization_2.q.out index 4dfb73dd99..5391062a55 100644 --- ql/src/test/results/clientpositive/llap/vectorization_2.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_2.q.out @@ -106,7 +106,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 256 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: struct), _col3 (type: bigint), _col4 (type: tinyint), _col5 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_3.q.out ql/src/test/results/clientpositive/llap/vectorization_3.q.out index 6bff73936b..b3b33d55da 100644 --- ql/src/test/results/clientpositive/llap/vectorization_3.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_3.q.out @@ -111,7 +111,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 404 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: double), _col4 (type: struct), _col5 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_4.q.out ql/src/test/results/clientpositive/llap/vectorization_4.q.out index a38c77c242..86988e83d9 100644 --- ql/src/test/results/clientpositive/llap/vectorization_4.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_4.q.out @@ -106,7 +106,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 252 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: tinyint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_5.q.out ql/src/test/results/clientpositive/llap/vectorization_5.q.out index d41de01162..166b1bfa7b 100644 --- ql/src/test/results/clientpositive/llap/vectorization_5.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_5.q.out @@ -100,7 +100,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: smallint), _col1 (type: bigint), _col2 (type: smallint), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_6.q.out ql/src/test/results/clientpositive/llap/vectorization_6.q.out index 84b9260c18..86d75f2c78 100644 --- ql/src/test/results/clientpositive/llap/vectorization_6.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_6.q.out @@ -88,7 +88,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_7.q.out ql/src/test/results/clientpositive/llap/vectorization_7.q.out index 3c7522912d..1b35ae2f16 100644 --- ql/src/test/results/clientpositive/llap/vectorization_7.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_7.q.out @@ -104,7 +104,7 @@ STAGE PLANS: Statistics: Num rows: 5461 Data size: 923616 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -351,7 +351,7 @@ STAGE PLANS: Statistics: Num rows: 5461 Data size: 923616 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_8.q.out ql/src/test/results/clientpositive/llap/vectorization_8.q.out index 22a1b34d0b..c1a26da7d6 100644 --- ql/src/test/results/clientpositive/llap/vectorization_8.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_8.q.out @@ -100,7 +100,7 @@ STAGE PLANS: Statistics: Num rows: 3059 Data size: 557250 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -334,7 +334,7 @@ STAGE PLANS: Statistics: Num rows: 3059 Data size: 557250 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_9.q.out ql/src/test/results/clientpositive/llap/vectorization_9.q.out index 7cf60ae514..ebbdc88edb 100644 --- ql/src/test/results/clientpositive/llap/vectorization_9.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_9.q.out @@ -104,7 +104,7 @@ STAGE PLANS: Statistics: Num rows: 2048 Data size: 434588 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: bigint), _col4 (type: struct), _col5 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out 
ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out index c83d5e6f22..9846b951be 100644 --- ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out @@ -68,7 +68,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out index ab8752a8d2..84aca17f9d 100644 --- ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_input_format_excludes.q.out @@ -1126,7 +1126,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: false enabledConditionsNotMet: hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.orc.OrcInputFormat IS false @@ -1218,7 +1218,7 @@ STAGE PLANS: Statistics: Num rows: 128 Data size: 22404 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: smallint), _col3 (type: bigint), _col4 (type: struct), _col5 (type: struct) Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: false enabledConditionsNotMet: hive.vectorized.use.vectorized.input.format IS true AND hive.vectorized.input.format.excludes NOT CONTAINS org.apache.hadoop.hive.ql.io.orc.OrcInputFormat IS false diff --git ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out index e46c7f4524..d6a6f9bac5 100644 --- ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out @@ -81,7 +81,7 @@ STAGE PLANS: Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out index ca2aa87176..c62f0e62ad 100644 --- ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_pushdown.q.out @@ -42,7 +42,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out 
ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out index 0027ab5226..0ed432c49b 100644 --- ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out @@ -131,7 +131,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: double), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: tinyint), _col8 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -392,7 +392,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 420 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: struct), _col2 (type: struct), _col3 (type: double), _col4 (type: struct), _col5 (type: int), _col6 (type: double), _col7 (type: struct), _col8 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -645,7 +645,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: tinyint), _col3 (type: struct), _col4 (type: int), _col5 (type: struct), _col6 (type: bigint), _col7 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -877,7 +877,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 328 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: bigint), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: float) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1107,7 +1107,7 @@ STAGE PLANS: Statistics: Num rows: 9898 Data size: 5632662 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1404,7 +1404,7 @@ STAGE PLANS: Statistics: Num rows: 8194 Data size: 3349228 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1651,7 +1651,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col2 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1955,7 +1955,7 @@ STAGE PLANS: TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -2214,7 +2214,7 @@ STAGE PLANS: Statistics: Num rows: 1141 Data size: 204228 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: struct), _col2 (type: bigint), _col3 (type: struct), _col4 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2490,7 +2490,7 @@ STAGE PLANS: Statistics: Num rows: 1136 Data size: 306696 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: struct), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct), _col5 (type: struct), _col6 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -2810,7 +2810,7 @@ STAGE PLANS: Statistics: Num rows: 6144 Data size: 5199016 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: bigint), _col5 (type: tinyint), _col6 (type: struct), _col7 (type: struct), _col8 (type: struct), _col9 (type: struct), _col10 (type: struct), _col11 (type: double), _col12 (type: struct), _col13 (type: struct), _col14 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -3211,7 +3211,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: float), _col2 (type: bigint), _col3 (type: struct), _col4 (type: struct), _col5 (type: bigint), _col6 (type: struct), _col7 (type: bigint), _col8 (type: struct), _col9 (type: struct), _col10 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -3446,7 +3446,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -3561,7 +3561,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -3748,7 +3748,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -3863,7 +3863,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS 
true @@ -3978,7 +3978,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -4093,7 +4093,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -4208,7 +4208,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -4323,7 +4323,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out index 61c5051bb9..6300b17a71 100644 --- ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out @@ -141,7 +141,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE value expressions: value (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -176,7 +176,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE value expressions: value (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -276,7 +276,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE value expressions: value (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -411,7 +411,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE value expressions: value (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_case.q.out ql/src/test/results/clientpositive/llap/vectorized_case.q.out index d2de89c361..beeb764cbb 100644 --- ql/src/test/results/clientpositive/llap/vectorized_case.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_case.q.out @@ -81,7 +81,7 @@ STAGE PLANS: output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -223,7 +223,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -305,7 +305,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -433,7 +433,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -544,7 +544,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -595,7 +595,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -646,7 +646,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -713,7 +713,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -764,7 +764,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -815,7 +815,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vectorized_casts.q.out ql/src/test/results/clientpositive/llap/vectorized_casts.q.out index 84b4d9454d..558567de1f 100644 --- ql/src/test/results/clientpositive/llap/vectorized_casts.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_casts.q.out @@ -196,7 +196,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_context.q.out ql/src/test/results/clientpositive/llap/vectorized_context.q.out index 8907c7f809..2f008ba0d5 100644 --- ql/src/test/results/clientpositive/llap/vectorized_context.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_context.q.out @@ -159,7 +159,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -187,7 +187,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 6075 Data size: 24300 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -216,7 +216,7 @@ STAGE PLANS: Statistics: Num rows: 6075 Data size: 615730 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out index db292508ac..a8d2ff08e1 100644 --- ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out @@ -284,7 +284,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -576,7 +576,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -872,7 +872,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1146,7 +1146,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1276,7 +1276,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE 
value expressions: _col0 (type: date), _col1 (type: date), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out index 228bd9de1b..51d8b9948c 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out @@ -80,7 +80,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 1808 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -151,7 +151,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -314,7 +314,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -385,7 +385,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -548,7 +548,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -619,7 +619,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -783,7 +783,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 1808 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -854,7 +854,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -925,7 +925,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1120,7 +1120,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 450 Data size: 80539 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1219,7 +1219,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1411,7 +1411,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 475 Data size: 1808 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1482,7 +1482,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1645,7 +1645,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 475 Data size: 1808 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be 
used (ACID table) Map 4 Map Operator Tree: TableScan @@ -1678,7 +1678,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out index c3b980dd26..31b7072e3f 100644 --- ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out @@ -128,7 +128,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Statistics: Num rows: 100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -161,7 +161,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -264,7 +264,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(10,1)) Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -297,7 +297,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -400,7 +400,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Statistics: Num rows: 100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -433,7 +433,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -536,7 +536,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: date) Statistics: Num rows: 100 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -569,7 +569,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: date), _col1 (type: date), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -672,7 +672,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: timestamp) Statistics: Num rows: 100 Data size: 4000 Basic stats: COMPLETE Column stats: COMPLETE Execution 
mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -705,7 +705,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -808,7 +808,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Statistics: Num rows: 100 Data size: 9400 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -841,7 +841,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -944,7 +944,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: char(10)) Statistics: Num rows: 100 Data size: 9400 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -977,7 +977,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 282 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: char(10)), _col1 (type: char(10)), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1080,7 +1080,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: varchar(10)) Statistics: Num rows: 100 Data size: 9400 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map 4 Map Operator Tree: TableScan @@ -1113,7 +1113,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 282 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: varchar(10)), _col1 (type: varchar(10)), _col2 (type: binary) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Reducer 2 Execution mode: llap Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/llap/vectorized_insert_into_bucketed_table.q.out ql/src/test/results/clientpositive/llap/vectorized_insert_into_bucketed_table.q.out index 28a96e08ce..f8d6559477 100644 --- ql/src/test/results/clientpositive/llap/vectorized_insert_into_bucketed_table.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_insert_into_bucketed_table.q.out @@ -110,6 +110,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.foo + Write Type: INSERT Stage: Stage-2 Dependency Collection @@ -117,12 +118,14 @@ STAGE PLANS: Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.foo + Write Type: INSERT + micromanaged table: true Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out 
index 0dff57afa6..3f1acde759 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out @@ -89,7 +89,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -131,7 +131,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 9173 Data size: 27396 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out index 36f1bbf12f..987b996bf0 100644 --- ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out @@ -149,7 +149,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out index 80e1cabc2d..36a12f74ef 100644 --- ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out @@ -65,7 +65,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -94,7 +94,7 @@ STAGE PLANS: Statistics: Num rows: 6059 Data size: 72396 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: smallint), _col2 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -122,7 +122,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: smallint) Statistics: Num rows: 9174 Data size: 27400 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out index 72216d3383..afa255a65a 100644 --- ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out 
+++ ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out @@ -56,7 +56,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 9173 Data size: 27396 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -98,7 +98,7 @@ STAGE PLANS: nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 9173 Data size: 27396 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out index 1f6e152991..760b49fb89 100644 --- ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_string_funcs.q.out @@ -75,7 +75,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out index ab2ac79b12..ea00c59a6f 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out @@ -53,7 +53,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Stage: Stage-0 Fetch Operator @@ -160,7 +160,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: timestamp), _col1 (type: timestamp) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -292,7 +292,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -385,7 +385,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true @@ -525,7 +525,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 560 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out index 7986494d5e..843ecb3f5d 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out @@ -131,7 +131,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -313,7 +313,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -495,7 +495,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -677,7 +677,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -820,7 +820,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -947,7 +947,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: 
enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -1091,7 +1091,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: struct), _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct) Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out index c346ff2cf8..79af32a36c 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out @@ -81,7 +81,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true @@ -245,7 +245,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Execution mode: vectorized, llap - LLAP IO: all inputs + LLAP IO: may be used (ACID table) Map Vectorization: enabled: true enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true diff --git ql/src/test/results/clientpositive/rcfile_null_value.q.out ql/src/test/results/clientpositive/rcfile_null_value.q.out index 2d2bef9be5..2efa6fc54f 100644 --- ql/src/test/results/clientpositive/rcfile_null_value.q.out +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out @@ -150,31 +150,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.dest1_rc - Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col2 (type: int), _col3 (type: string) - outputColumnNames: c1, c2, c3, c4 - Statistics: Num rows: 60 Data size: 642 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(c1, 'hll'), compute_stats(c2, 'hll'), compute_stats(c3, 'hll'), compute_stats(c4, 'hll') - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 1728 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.dest1_rc + micromanaged table: true Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/sample8.q.out ql/src/test/results/clientpositive/sample8.q.out index 365b7cfa31..686f3bdd04 100644 --- ql/src/test/results/clientpositive/sample8.q.out +++ ql/src/test/results/clientpositive/sample8.q.out @@ -97,6 
+97,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -145,6 +147,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -193,6 +197,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -241,6 +247,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git ql/src/test/results/clientpositive/show_create_table_delimited.q.out ql/src/test/results/clientpositive/show_create_table_delimited.q.out index 9fb35c5dda..0552f952d4 100644 --- ql/src/test/results/clientpositive/show_create_table_delimited.q.out +++ ql/src/test/results/clientpositive/show_create_table_delimited.q.out @@ -39,6 +39,8 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'transactional'='true', + 'transactional_properties'='insert_only', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE diff --git ql/src/test/results/clientpositive/stats1.q.out ql/src/test/results/clientpositive/stats1.q.out index 461d27ee73..83abd99de9 100644 --- ql/src/test/results/clientpositive/stats1.q.out +++ ql/src/test/results/clientpositive/stats1.q.out @@ -75,19 +75,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - value expressions: _col0 (type: struct), _col1 (type: struct) + Write Type: INSERT TableScan alias: s2 Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE @@ -105,42 +93,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE 
Column stats: PARTIAL - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - value expressions: _col0 (type: struct), _col1 (type: struct) - Reduce Operator Tree: - Group By Operator - aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) - mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work @@ -226,6 +190,8 @@ Table Parameters: numRows 26 rawDataSize 199 totalSize 225 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -267,6 +233,8 @@ Table Parameters: numRows 26 rawDataSize 199 totalSize 1583 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/stats15.q.out ql/src/test/results/clientpositive/stats15.q.out index faebe8afd8..96fef6fd10 100644 --- ql/src/test/results/clientpositive/stats15.q.out +++ ql/src/test/results/clientpositive/stats15.q.out @@ -46,6 +46,8 @@ Table Parameters: numRows 500 rawDataSize 5312 totalSize 5812 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -148,6 +150,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information @@ -284,6 +288,8 @@ Table Parameters: numRows 1500 rawDataSize 15936 totalSize 17436 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/transform_ppr1.q.out ql/src/test/results/clientpositive/transform_ppr1.q.out index b0c23931b2..ffd050c216 100644 --- ql/src/test/results/clientpositive/transform_ppr1.q.out +++ ql/src/test/results/clientpositive/transform_ppr1.q.out @@ -105,6 +105,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -153,6 +155,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -201,6 +205,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -249,6 +255,8 @@ STAGE PLANS: serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transactional true + transactional_properties insert_only #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git ql/src/test/results/clientpositive/union10.q.out ql/src/test/results/clientpositive/union10.q.out index 0bb9ccd1e5..ac9188b2be 100644 --- ql/src/test/results/clientpositive/union10.q.out +++ ql/src/test/results/clientpositive/union10.q.out @@ -27,7 +27,7 @@ STAGE DEPENDENCIES: Stage-2 depends on stages: Stage-1, Stage-9, Stage-10 Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6 Stage-5 - Stage-0 depends on stages: Stage-5, Stage-4, Stage-7 + Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2 Stage-3 depends on stages: Stage-0 Stage-4 Stage-6 @@ -88,19 +88,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: key, value - Statistics: Num rows: 3 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: struct), _col1 (type: struct) + Write Type: INSERT TableScan Union Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE @@ -116,19 +104,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: key, value - Statistics: Num rows: 3 Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: struct), _col1 (type: struct) + Write Type: INSERT TableScan Union Statistics: Num rows: 3 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE @@ -144,51 +120,24 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: key, value - Statistics: Num rows: 3 
Data size: 276 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: struct), _col1 (type: struct) - Reduce Operator Tree: - Group By Operator - aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) - mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Write Type: INSERT Stage: Stage-8 Conditional Operator Stage: Stage-5 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work @@ -223,10 +172,7 @@ STAGE PLANS: name: default.tmptable Stage: Stage-7 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-9 Map Reduce diff --git ql/src/test/results/clientpositive/union31.q.out ql/src/test/results/clientpositive/union31.q.out index 6de0b40f29..3618529b11 100644 --- ql/src/test/results/clientpositive/union31.q.out +++ ql/src/test/results/clientpositive/union31.q.out @@ -183,31 +183,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: key, cnt - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t3 + micromanaged table: true Stage: Stage-3 Stats Work @@ -276,31 +263,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t4 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: value, cnt - Statistics: Num 
rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(value, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t4 + micromanaged table: true Stage: Stage-7 Map Reduce @@ -514,21 +488,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t5 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: c1, cnt - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Group By Operator aggregations: sum(VALUE._col0) keys: KEY._col0 (type: string) @@ -547,31 +507,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t6 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: c1, cnt - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t5 + micromanaged table: true Stage: Stage-4 Stats Work @@ -614,12 +561,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t6 + micromanaged table: true Stage: Stage-7 Map Reduce @@ -926,21 +874,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t7 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: c1, cnt - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Group By Operator aggregations: count(1) keys: KEY._col0 (type: string) @@ -959,31 +893,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t8 - Select Operator - expressions: _col0 (type: string), _col1 (type: int) - outputColumnNames: c1, cnt - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE - Group By Operator - aggregations: compute_stats(c1, 'hll'), compute_stats(cnt, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t7 + micromanaged table: true Stage: Stage-4 Stats Work @@ -1026,12 +947,13 @@ STAGE PLANS: Stage: Stage-1 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t8 + micromanaged table: true Stage: Stage-7 Map Reduce diff --git ql/src/test/results/clientpositive/union6.q.out ql/src/test/results/clientpositive/union6.q.out index ddfd54fd2e..43ea52195b 100644 --- ql/src/test/results/clientpositive/union6.q.out +++ ql/src/test/results/clientpositive/union6.q.out @@ -23,7 +23,7 @@ STAGE DEPENDENCIES: Stage-2 depends on stages: Stage-1 Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6 Stage-5 - Stage-0 depends on stages: Stage-5, Stage-4, Stage-7 + Stage-0 depends on stages: Stage-5, Stage-4, Stage-7, Stage-2 Stage-3 depends on stages: Stage-0 Stage-4 Stage-6 @@ -78,19 +78,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column 
stats: PARTIAL - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - value expressions: _col0 (type: struct), _col1 (type: struct) + Write Type: INSERT TableScan alias: s2 Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE @@ -108,51 +96,24 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable - Select Operator - expressions: _col0 (type: string), _col1 (type: string) - outputColumnNames: key, value - Statistics: Num rows: 26 Data size: 7072 Basic stats: COMPLETE Column stats: PARTIAL - Group By Operator - aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll') - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - value expressions: _col0 (type: struct), _col1 (type: struct) - Reduce Operator Tree: - Group By Operator - aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1) - mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: PARTIAL - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Write Type: INSERT Stage: Stage-8 Conditional Operator Stage: Stage-5 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable + micromanaged table: true Stage: Stage-3 Stats Work @@ -187,10 +148,7 @@ STAGE PLANS: name: default.tmptable Stage: Stage-7 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection PREHOOK: query: insert overwrite table tmptable select unionsrc.key, unionsrc.value FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1 diff --git ql/src/test/results/clientpositive/union_remove_1.q.out ql/src/test/results/clientpositive/union_remove_1.q.out index 6285d0d514..c40dbec58b 100644 --- ql/src/test/results/clientpositive/union_remove_1.q.out +++ ql/src/test/results/clientpositive/union_remove_1.q.out @@ -83,16 +83,18 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 + Write Type: INSERT Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 + micromanaged table: true Stage: Stage-2 Map Reduce @@ -131,6 +133,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 + Write Type: INSERT PREHOOK: query: insert overwrite table outputTbl1 SELECT * @@ -173,6 +176,8 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 2 totalSize 40 + transactional true + transactional_properties insert_only #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out index 8896459f73..fbca6d40eb 100644 --- ql/src/test/results/clientpositive/vector_decimal_6.q.out +++ ql/src/test/results/clientpositive/vector_decimal_6.q.out @@ -564,12 +564,14 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.DECIMAL_6_3 + Write Type: INSERT Stage: Stage-0 Move Operator files: hdfs directory: true #### A masked pattern was here #### + Write Type: INSERT Stage: Stage-3 Create Table Operator: @@ -579,6 +581,9 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat serde name: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.DECIMAL_6_3 + table properties: + transactional true + transactional_properties insert_only Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/vector_varchar_4.q.out ql/src/test/results/clientpositive/vector_varchar_4.q.out index 00a82c3813..957be0bcb5 100644 --- ql/src/test/results/clientpositive/vector_varchar_4.q.out +++ ql/src/test/results/clientpositive/vector_varchar_4.q.out @@ -170,6 +170,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe name: default.varchar_lazy_binary_columnar + Write Type: INSERT Execution mode: vectorized Map Vectorization: enabled: true @@ -185,20 +186,18 @@ STAGE PLANS: Conditional Operator Stage: Stage-4 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection Stage: Stage-0 Move Operator tables: - replace: true + replace: false table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe name: default.varchar_lazy_binary_columnar + micromanaged table: true Stage: Stage-2 Stats Work @@ -219,8 +218,5 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat Stage: Stage-6 - Move Operator - files: - hdfs directory: true -#### A masked pattern was here #### + Dependency Collection diff --git service/src/test/org/apache/hive/service/cli/CLIServiceTest.java service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index bc6648e408..410cf3e7dc 100644 --- service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -156,7 +156,7 @@ public void testExecuteStatement() throws Exception { OperationHandle opHandle; String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname - + " = false"; + + " = true"; opHandle = client.executeStatement(sessionHandle, queryString, confOverlay); client.closeOperation(opHandle); @@ -213,7 +213,7 @@ public void testExecuteStatementAsync() throws Exception { // Change lock manager, otherwise unit-test doesn't go through queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname - + " = false"; + + " = true"; opHandle = client.executeStatement(sessionHandle, 
queryString, confOverlay); client.closeOperation(opHandle); @@ -499,7 +499,7 @@ private SessionHandle openSession(Map confOverlay) SessionState.get().setIsHiveServerQuery(true); // Pretend we are in HS2. String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname - + " = false"; + + " = true"; client.executeStatement(sessionHandle, queryString, confOverlay); return sessionHandle; } @@ -587,8 +587,8 @@ public void testConfOverlay() throws Exception { String tabName = "TEST_CONF_EXEC"; String tabNameVar = "tabNameVar"; - String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname - + " = false"; + String setLockMgr = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + + " = true"; OperationHandle opHandle = client.executeStatement(sessionHandle, setLockMgr, null); client.closeOperation(opHandle); diff --git service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java index c4f5451d9d..dd2f491cbf 100644 --- service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java +++ service/src/test/org/apache/hive/service/cli/operation/TestQueryLifeTimeHooksWithSQLOperation.java @@ -47,7 +47,6 @@ @Test public void testQueryInfoInHookContext() throws IllegalAccessException, ClassNotFoundException, InstantiationException, HiveSQLException { HiveConf conf = new HiveConf(TestQueryHooks.class); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); conf.set(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS.varname, QueryInfoVerificationHook.class.getName()); diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java index 646159f1e4..c60ede7c02 100644 --- service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java +++ service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java @@ -74,7 +74,6 @@ public void setup() throws Exception { conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL, "3s"); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); - conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); conf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, false); MetricsFactory.init(conf); diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index b46cc38a22..835606f15f 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -835,7 +835,7 @@ public static ConfVars getMetaConf(String name) { "no transactions."), // Metastore always support concurrency, but certain ACID tests depend on this being set. 
We // need to do the work to detangle this - HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", false, + HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", "hive.support.concurrency", true, "Whether Hive supports concurrency control or not. \n" + "A ZooKeeper instance must be up and running when using zookeeper Hive lock manager "),
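
Not part of the patch: with hive.support.concurrency now defaulting to true, the hunks above simply drop the per-test overrides that used to force it to false (and flip the SET statements in CLIServiceTest). The sketch below is a hypothetical helper, not anything added by this patch, assuming a test still wants the legacy non-ACID setup; it reuses only the HiveConf calls and the SET-statement form visible in the removed test code, and the DummyTxnManager fallback is an assumption about the appropriate legacy transaction manager rather than something shown in this section.

import org.apache.hadoop.hive.conf.HiveConf;

public class LegacyLockingTestConf {

  // Programmatic override, mirroring the setBoolVar(...) calls this patch removes
  // from individual test setUp() methods.
  public static HiveConf withoutConcurrency(Class<?> testClass) {
    HiveConf conf = new HiveConf(testClass);
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    // Assumed fallback: pair the override with the legacy lock/txn manager.
    conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER,
        "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
    return conf;
  }

  // Session-level override, mirroring the "SET hive.support.concurrency = ..."
  // statements updated in CLIServiceTest above.
  public static String disableConcurrencyStatement() {
    return "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + " = false";
  }
}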