diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index 6ff48ee..180e802 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -356,7 +356,7 @@ private Path createInputFile() throws IOException { * @throws Exception if any error occurs */ private List<String> getTableData(String table, String database) throws Exception { - QueryState queryState = new QueryState(null); + QueryState queryState = new QueryState.Builder().build(); HiveConf conf = queryState.getConf(); conf.addResource("hive-site.xml"); ArrayList<String> results = new ArrayList<String>(); diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index e2c3992..ddbe6db 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -755,6 +755,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\ beeline.positive.include=drop_with_concurrency.q,\ escape_comments.q,\ mapjoin2.q,\ + materialized_view_create_rewrite.q,\ smb_mapjoin_1.q,\ smb_mapjoin_10.q,\ smb_mapjoin_11.q,\ diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index b897ffa..d408321 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -566,7 +566,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation()); } - queryState = new QueryState(new HiveConf(Driver.class)); + queryState = new QueryState.Builder().withHiveConf(new HiveConf(Driver.class)).build(); if (useHBaseMetastore) { startMiniHBaseCluster(); } else { @@ -1896,7 +1896,7 @@ public ASTNode parseQuery(String tname) throws Exception { public void resetParser() throws SemanticException { drv.init(); pd = new ParseDriver(); - queryState = new QueryState(conf); + queryState = new QueryState.Builder().withHiveConf(conf).build(); sem = new SemanticAnalyzer(queryState); } diff --git itests/util/src/main/java/org/apache/hive/beeline/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java index 3d9ca99..af464b9 100644 --- itests/util/src/main/java/org/apache/hive/beeline/QFile.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -63,6 +63,7 @@ private static final String MASK_PATTERN = "#### A masked pattern was here ####\n"; private String name; + private String databaseName; private File inputFile; private File rawOutputFile; private File outputFile; @@ -81,6 +82,10 @@ public String getName() { return name; } + public String getDatabaseName() { + return databaseName; + } + public File getInputFile() { return inputFile; } @@ -163,8 +168,8 @@ private String replaceTableNames(String source) { */ private String revertReplaceTableNames(String source) { for (String table : srcTables) { - source = source.replaceAll("(?is)(\\s+)default\\." + table + "([\\s;\\n\\)])", "$1" + table - + "$2"); + source = source.replaceAll("(?is)(?
confOverlay, boolean runAsync) { - this.queryConf = createConf(conf, confOverlay, runAsync); - } - /** - * If there are query specific settings to overlay, then create a copy of config - * There are two cases we need to clone the session config that's being passed to hive driver - * 1. Async query - - * If the client changes a config setting, that shouldn't reflect in the execution already underway - * 2. confOverlay - - * The query specific settings should only be applied to the query config and not session - * @return new configuration + * Private constructor, use QueryState.Builder instead + * @param conf The query specific configuration object */ - private HiveConf createConf(HiveConf conf, - Map<String, String> confOverlay, - boolean runAsync) { - - if ( (confOverlay != null && !confOverlay.isEmpty()) ) { - conf = (conf == null ? new HiveConf() : new HiveConf(conf)); - - // apply overlay query specific settings, if any - for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) { - try { - conf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); - } catch (IllegalArgumentException e) { - throw new RuntimeException("Error applying statement specific settings", e); - } - } - } else if (runAsync) { - conf = (conf == null ? new HiveConf() : new HiveConf(conf)); - } - - if (conf == null) { - conf = new HiveConf(); - } - - conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId()); - return conf; + private QueryState(HiveConf conf) { + this.queryConf = conf; } public String getQueryId() { @@ -109,4 +72,105 @@ public void setCommandType(HiveOperation commandType) { public HiveConf getConf() { return queryConf; } + + /** + * Builder to instantiate the QueryState object. + */ + public static class Builder { + private Map<String, String> confOverlay = null; + private boolean runAsync = false; + private boolean generateNewQueryId = false; + private HiveConf hiveConf = null; + + /** + * Default constructor - use this builder to create a QueryState object + */ + public Builder() { + } + + /** + * Set this to true if the configuration should be detached from the original config. If not + * set, the default value is false. + * @param runAsync If the configuration should be detached + * @return The builder + */ + public Builder withRunAsync(boolean runAsync) { + this.runAsync = runAsync; + return this; + } + + /** + * Set this if there are specific configuration values which should be added to the original + * config. If at least one value is set, then the configuration will be detached from the + * original one. + * @param confOverlay The query specific parameters + * @return The builder + */ + public Builder withConfOverlay(Map<String, String> confOverlay) { + this.confOverlay = confOverlay; + return this; + } + + /** + * Set this to true if a new queryId should be generated, otherwise the original one will be kept. + * If not set, the default value is false. + * @param generateNewQueryId If a new queryId should be generated + * @return The builder + */ + public Builder withGenerateNewQueryId(boolean generateNewQueryId) { + this.generateNewQueryId = generateNewQueryId; + return this; + } + + /** + * The source HiveConf object used to create the QueryState. If runAsync is false, and the + * confOverlay is empty, then we will reuse the hiveConf object as a backing datastore for the + * QueryState. We will create a clone of the hiveConf object otherwise. + * @param hiveConf The source HiveConf + * @return The builder + */ + public Builder withHiveConf(HiveConf hiveConf) { + this.hiveConf = hiveConf; + return this; + } + + /** + * Creates the QueryState object. The default values are: + * - runAsync false + * - confOverlay null + * - generateNewQueryId false + * - hiveConf null + * @return The generated QueryState object + */ + public QueryState build() { + HiveConf queryConf = hiveConf; + + if (queryConf == null) { + // Generate a new conf if necessary + queryConf = new HiveConf(); + } else if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) { + // Detach the original conf if necessary + queryConf = new HiveConf(queryConf); + } + + // Set the specific parameters if needed + if (confOverlay != null && !confOverlay.isEmpty()) { + // apply overlay query specific settings, if any + for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) { + try { + queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); + } catch (IllegalArgumentException e) { + throw new RuntimeException("Error applying statement specific settings", e); + } + } + } + + // Generate the new queryId if needed + if (generateNewQueryId) { + queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId()); + } + + return new QueryState(queryConf); + } + } }
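Reviewer note: a minimal usage sketch of the new builder API, derived only from the javadoc and build() logic above. The names sessionConf and overlay are illustrative, not part of this patch.

    // Equivalent of the old `new QueryState(null)` used by the tests below:
    QueryState simple = new QueryState.Builder().build();

    // Async query with statement-level overrides: the builder clones sessionConf,
    // applies the overlay via verifyAndSet(), and stamps a fresh queryId.
    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("hive.exec.parallel", "true");
    QueryState state = new QueryState.Builder()
        .withHiveConf(sessionConf)         // sessionConf: some existing HiveConf
        .withConfOverlay(overlay)
        .withRunAsync(true)
        .withGenerateNewQueryId(true)
        .build();
    // state.getConf() is now a detached copy; sessionConf is left untouched.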
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index cf575de..b07d6b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -740,7 +740,7 @@ private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc, DriverContext driverCxt = new DriverContext(); Task task; if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) { - TezWork tezWork = new TezWork(conf.getVar(HiveConf.ConfVars.HIVEQUERYID), conf); + TezWork tezWork = new TezWork(queryState.getQueryId(), conf); mergeWork.setName("File Merge"); tezWork.add(mergeWork); task = new TezTask(); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java index 77bce97..ad921f3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java @@ -344,7 +344,8 @@ public static void main(String[] args) { } } - QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class)); + QueryState queryState = + new QueryState.Builder().withHiveConf(new HiveConf(conf, PartialScanTask.class)).build(); PartialScanWork mergeWork = new PartialScanWork(inputPaths); DriverContext driverCxt = new DriverContext(); PartialScanTask taskExec = new PartialScanTask(); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java index b121eea..56c0163 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java @@ -60,7 +60,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.CalcitePlanner; -import org.apache.hadoop.hive.ql.parse.ParseDriver; import org.apache.hadoop.hive.ql.parse.ParseUtils; import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; import org.apache.hadoop.hive.ql.parse.RowResolver; @@ -70,7 +69,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; -import org.joda.time.Interval; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -330,7 +328,8 @@ private static RelNode createTableScan(Table viewTable) { private static RelNode parseQuery(String viewQuery) { try { final ASTNode node = ParseUtils.parse(viewQuery); - final QueryState qs = new QueryState(SessionState.get().getConf()); + final QueryState qs = + new QueryState.Builder().withHiveConf(SessionState.get().getConf()).build(); CalcitePlanner analyzer = new CalcitePlanner(qs); analyzer.initCtx(new Context(SessionState.get().getConf())); analyzer.init(false); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java index 3b719af..d72ff5cd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -33,18 +32,15 @@ import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.SelectOperator; -import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; -import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.LoadFileDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.SelectDesc; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; @@ -135,12 +131,13 @@ private Operator genSelOpForAnalyze(String analyzeCommand, Context origCtx) thro ASTNode tree = ParseUtils.parse(analyzeCommand, ctx); //1. get the ColumnStatsSemanticAnalyzer - BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), tree); + QueryState queryState = new QueryState.Builder().withHiveConf(conf).build(); + BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(queryState, tree); ColumnStatsSemanticAnalyzer colSem = (ColumnStatsSemanticAnalyzer) baseSem; //2. get the rewritten AST ASTNode ast = colSem.rewriteAST(tree, this); - baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), ast); + baseSem = SemanticAnalyzerFactory.get(queryState, ast); SemanticAnalyzer sem = (SemanticAnalyzer) baseSem; QB qb = new QB(null, null, false); ASTNode child = ast;
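Reviewer note: the ColumnStatsAutoGatherContext hunk above is a small behavior change, and an intentional one: the QueryState is now built once and handed to both SemanticAnalyzerFactory.get() calls, so the original AST and the rewritten AST are analyzed under the same configuration and the same queryId. The resulting pattern, in sketch form (tree and conf stand in for the method's locals):

    QueryState queryState = new QueryState.Builder().withHiveConf(conf).build();
    BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(queryState, tree);
    ColumnStatsSemanticAnalyzer colSem = (ColumnStatsSemanticAnalyzer) baseSem;
    // The rewritten AST reuses the same QueryState rather than constructing a second one.
    ASTNode ast = colSem.rewriteAST(tree, analyzeContext);  // analyzeContext: the enclosing context
    baseSem = SemanticAnalyzerFactory.get(queryState, ast);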
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index c7266bc..b4898e2 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -62,7 +62,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.TextInputFormat; -import org.apache.hadoop.util.Shell; /** * Mimics the actual query compiler in generating end to end plans and testing @@ -83,7 +82,7 @@ static { try { - queryState = new QueryState(new HiveConf(ExecDriver.class)); + queryState = new QueryState.Builder().withHiveConf(new HiveConf(ExecDriver.class)).build(); conf = queryState.getConf(); conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true); conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java index c734988..deba1d5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java @@ -42,7 +42,7 @@ @Before public void setup() throws Exception { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); conf = queryState.getConf(); SessionState.start(conf); context = new Context(conf); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java index 201622e..182ac2b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java @@ -50,7 +50,7 @@ @BeforeClass public static void init() throws Exception { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); conf = queryState.getConf(); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java index e607f10..45901c9 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java @@ -38,7 +38,8 @@ @BeforeClass public static void initialize() { - queryState = new QueryState(new HiveConf(SemanticAnalyzer.class)); + queryState = + new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build(); conf = queryState.getConf(); SessionState.start(conf); } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java index 2674835..993b4da 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java @@ -48,7 +48,8 @@ @BeforeClass public static void initialize() { - queryState = new QueryState(new HiveConf(SemanticAnalyzer.class)); + queryState = + new QueryState.Builder().withHiveConf(new
HiveConf(SemanticAnalyzer.class)).build(); conf = queryState.getConf(); SessionState.start(conf); } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java index 80865bd..1cb4470 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java @@ -56,7 +56,8 @@ @BeforeClass public static void initialize() throws HiveException { - queryState = new QueryState(new HiveConf(SemanticAnalyzer.class)); + queryState = + new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build(); conf = queryState.getConf(); conf.set("hive.security.authorization.manager", ""); SessionState.start(conf); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java index 5849950..b19d42f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java @@ -32,7 +32,7 @@ @Before public void setup() throws Exception { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); conf = queryState.getConf(); } @Test diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java index a573808..9c20521 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java @@ -223,7 +223,7 @@ public void testInsertValuesPartitioned() throws Exception { @Before public void setup() { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); conf = queryState.getConf(); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java index 58cb4b4..746aa4b 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java @@ -99,7 +99,7 @@ public static void reset() { @Before public void setup() throws Exception { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); HiveConf conf = queryState.getConf(); conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY, TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName()); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java index 5d01080..349f494 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java @@ -19,8 +19,6 @@ import java.util.HashMap; -import junit.framework.Assert; - import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -41,7 +39,7 @@ @Before public void setup() throws Exception { - queryState = new QueryState(null); + queryState = new 
QueryState.Builder().build(); db = Mockito.mock(Hive.class); table = new Table(DB, TABLE); partition = new Partition(table); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java index c552ba7..312770f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java @@ -40,7 +40,7 @@ @Before public void setup() throws Exception { - queryState = new QueryState(null); + queryState = new QueryState.Builder().build(); //set authorization mode to V2 HiveConf conf = queryState.getConf(); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, diff --git ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out new file mode 100644 index 0000000..041621f --- /dev/null +++ ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out @@ -0,0 +1,322 @@ +PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable +PREHOOK: query: insert into cmv_basetable values + (1, 'alfred', 10.30, 2), + (2, 'bob', 3.14, 3), + (2, 'bonnie', 172342.2, 3), + (3, 'calvin', 978.76, 3), + (3, 'charlie', 9.8, 1) +PREHOOK: type: QUERY +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: insert into cmv_basetable values + (1, 'alfred', 10.30, 2), + (2, 'bob', 3.14, 3), + (2, 'bonnie', 172342.2, 3), + (3, 'calvin', 978.76, 3), + (3, 'charlie', 9.8, 1) +POSTHOOK: type: QUERY +POSTHOOK: Output: default@cmv_basetable +POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.d EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view enable rewrite +as select a, b, c from cmv_basetable where a = 2 +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: create materialized view cmv_mat_view enable rewrite +as select a, b, c from cmv_basetable where a = 2 +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: select * from cmv_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +2 bob 3.14 +2 bonnie 172342.20 +PREHOOK: query: show tblproperties cmv_mat_view +PREHOOK: type: 
SHOW_TBLPROPERTIES +POSTHOOK: query: show tblproperties cmv_mat_view +POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 1 +totalSize 453 +#### A masked pattern was here #### +PREHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite +as select a, c from cmv_basetable where a = 3 +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view2 +POSTHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite +as select a, c from cmv_basetable where a = 3 +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view2 +PREHOOK: query: select * from cmv_mat_view2 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +3 978.76 +3 9.80 +PREHOOK: query: show tblproperties cmv_mat_view2 +PREHOOK: type: SHOW_TBLPROPERTIES +POSTHOOK: query: show tblproperties cmv_mat_view2 +POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 1 +totalSize 322 +#### A masked pattern was here #### +PREHOOK: query: explain +select a, c from cmv_basetable where a = 3 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a, c from cmv_basetable where a = 3 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: default.cmv_mat_view2 + Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: a (type: int), c (type: decimal(10,2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE + ListSink + +PREHOOK: query: select a, c from cmv_basetable where a = 3 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_basetable +PREHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select a, c from cmv_basetable where a = 3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +3 978.76 +3 9.80 +Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain +select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: default.cmv_mat_view2 + Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: c (type: decimal(10,2)) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(10,2)) + TableScan + alias: cmv_basetable + Statistics: Num rows: 5 Data size: 81 Basic 
stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((d = 3) and (3 = a)) (type: boolean) + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: c (type: decimal(10,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(10,2)) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_basetable +PREHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +3 9.80 3 978.76 +3 978.76 3 978.76 +PREHOOK: query: drop materialized view cmv_mat_view2 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view2 +PREHOOK: Output: default@cmv_mat_view2 +POSTHOOK: query: drop materialized view cmv_mat_view2 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view2 +POSTHOOK: Output: default@cmv_mat_view2 +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain +select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: cmv_basetable + Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (a = 3) (type: boolean) + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: c (type: decimal(10,2)) + outputColumnNames: _col0 + Statistics: Num rows: 2 Data size: 32 
Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(10,2)) + TableScan + alias: cmv_basetable + Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((d = 3) and (3 = a)) (type: boolean) + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: c (type: decimal(10,2)) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: decimal(10,2)) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_basetable +#### A masked pattern was here #### +POSTHOOK: query: select * from ( + (select a, c from cmv_basetable where a = 3) table1 + join + (select a, c from cmv_basetable where d = 3) table2 + on table1.a = table2.a) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_basetable +#### A masked pattern was here #### +3 9.80 3 978.76 +3 978.76 3 978.76 diff --git service/src/java/org/apache/hive/service/cli/operation/Operation.java service/src/java/org/apache/hive/service/cli/operation/Operation.java index 0b27608..4e78551 100644 --- service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -20,7 +20,6 @@ import java.io.File; import java.io.FileNotFoundException; import java.util.EnumSet; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,7 +62,6 @@ protected volatile Future<?> backgroundHandle; protected OperationLog operationLog; protected boolean isOperationLogEnabled; - protected Map<String, String> confOverlay = new HashMap<String, String>(); private long operationTimeout; private volatile long lastAccessTime; @@ -90,9 +88,6 @@ protected Operation(HiveSession parentSession, Map<String, String> confOverlay, protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType, boolean isAsyncQueryState) { this.parentSession = parentSession; - if (confOverlay != null) { - this.confOverlay = confOverlay; - } this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion()); beginTime = System.currentTimeMillis(); lastAccessTime = beginTime; @@ -101,7 +96,12 @@ protected Operation(HiveSession parentSession, currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX, MetricsConstant.COMPLETED_OPERATION_PREFIX, state); - queryState = new QueryState(parentSession.getHiveConf(), confOverlay, isAsyncQueryState); + queryState = new QueryState.Builder() + .withConfOverlay(confOverlay) + .withRunAsync(isAsyncQueryState) + .withGenerateNewQueryId(true) + .withHiveConf(parentSession.getHiveConf()) + .build(); } public Future<?> getBackgroundHandle() {
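Reviewer note: with withGenerateNewQueryId(true), the Operation constructor above becomes the single place where a queryId is minted; it lands in the QueryState's HiveConf under HiveConf.ConfVars.HIVEQUERYID, which is why the confOverlay field could be removed. A sketch of how a caller retrieves it afterwards (the LOG call is illustrative):

    QueryState qs = new QueryState.Builder()
        .withHiveConf(parentSession.getHiveConf())
        .withGenerateNewQueryId(true)
        .build();
    String queryId = qs.getQueryId();  // reads HiveConf.ConfVars.HIVEQUERYID from qs.getConf()
    LOG.info("Operation created with queryId: " + queryId);

This is what lets SQLOperation below replace its confOverlay lookups with queryState.getQueryId().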
diff --git service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index 0b51591..1a2be8b 100644 --- service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -169,7 +169,7 @@ public void prepare(QueryState queryState) throws HiveSQLException { @Override public void run() { try { - String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname); + String queryId = queryState.getQueryId(); LOG.info("Query timed out after: " + queryTimeout + " seconds. Cancelling the execution now: " + queryId); SQLOperation.this.cancel(OperationState.TIMEDOUT); @@ -397,7 +397,7 @@ private synchronized void cleanup(OperationState state) throws HiveSQLException Future<?> backgroundHandle = getBackgroundHandle(); if (backgroundHandle != null) { boolean success = backgroundHandle.cancel(true); - String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname); + String queryId = queryState.getQueryId(); if (success) { LOG.info("The running operation has been successfully interrupted: " + queryId); } else if (state == OperationState.CANCELED) { @@ -430,7 +430,7 @@ private synchronized void cleanup(OperationState state) throws HiveSQLException public void cancel(OperationState stateAfterCancel) throws HiveSQLException { String queryId = null; if (stateAfterCancel == OperationState.CANCELED) { - queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname); + queryId = queryState.getQueryId(); LOG.info("Cancelling the query execution: " + queryId); } cleanup(stateAfterCancel);
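Reviewer note: the builder's detach semantics are only documented in the javadoc; a small unit test could pin them down. A sketch under standard JUnit 4, assuming no such test already exists in the patch (class and test names are hypothetical):

    import static org.junit.Assert.*;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;
    import org.junit.Test;

    public class TestQueryStateBuilder {

      @Test
      public void buildReusesConfWithoutOverlayOrAsync() {
        HiveConf conf = new HiveConf();
        QueryState state = new QueryState.Builder().withHiveConf(conf).build();
        // No overlay, not async: the conf is used as the backing store directly.
        assertSame(conf, state.getConf());
      }

      @Test
      public void buildDetachesConfForOverlayOrAsync() {
        HiveConf conf = new HiveConf();
        Map<String, String> overlay = new HashMap<String, String>();
        overlay.put("hive.exec.parallel", "true");

        QueryState overlaid = new QueryState.Builder()
            .withHiveConf(conf).withConfOverlay(overlay).build();
        QueryState async = new QueryState.Builder()
            .withHiveConf(conf).withRunAsync(true).build();

        // Overlay or runAsync forces a cloned HiveConf, leaving the original untouched.
        assertNotSame(conf, overlaid.getConf());
        assertNotSame(conf, async.getConf());
        assertEquals("true", overlaid.getConf().get("hive.exec.parallel"));
      }
    }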