diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 6ff48ee..0877ede 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -356,7 +357,7 @@ private Path createInputFile() throws IOException {
    * @throws Exception if any error occurs
    */
   private List<String> getTableData(String table, String database) throws Exception {
-    QueryState queryState = new QueryState(null);
+    QueryState queryState = new QueryStateBuilder().build(null);
     HiveConf conf = queryState.getConf();
     conf.addResource("hive-site.xml");
     ArrayList<String> results = new ArrayList<String>();
diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index 5ab3076..3c93c8d 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -753,6 +753,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
 
 beeline.positive.include=drop_with_concurrency.q,\
   escape_comments.q,\
+  materialized_view_create_rewrite.q,\
   smb_mapjoin_1.q,\
   smb_mapjoin_10.q,\
   smb_mapjoin_11.q,\
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index b897ffa..adcb5d6 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -126,6 +126,7 @@
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.processors.HiveCommand;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.common.util.StreamPrinter;
@@ -566,7 +567,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
       System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
     }
 
-    queryState = new QueryState(new HiveConf(Driver.class));
+    queryState = new QueryStateBuilder().build(new HiveConf(Driver.class));
     if (useHBaseMetastore) {
       startMiniHBaseCluster();
     } else {
@@ -1896,7 +1897,7 @@ public ASTNode parseQuery(String tname) throws Exception {
   public void resetParser() throws SemanticException {
     drv.init();
     pd = new ParseDriver();
-    queryState = new QueryState(conf);
+    queryState = new QueryStateBuilder().build(conf);
     sem = new SemanticAnalyzer(queryState);
   }
 
diff --git itests/util/src/main/java/org/apache/hive/beeline/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java
index 3d9ca99..af464b9 100644
--- itests/util/src/main/java/org/apache/hive/beeline/QFile.java
+++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java
@@ -63,6 +63,7 @@
   private static final String MASK_PATTERN = "#### A masked pattern was here ####\n";
 
   private String name;
+  private String databaseName;
   private File inputFile;
   private File rawOutputFile;
   private File outputFile;
@@ -81,6 +82,10 @@
   public String getName() {
     return name;
   }
 
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
   public File getInputFile() {
     return inputFile;
   }
@@ -163,8 +168,8 @@ private String replaceTableNames(String source) {
    */
   private String revertReplaceTableNames(String source) {
     for (String table : srcTables) {
-      source = source.replaceAll("(?is)(\\s+)default\\." + table + "([\\s;\\n\\)])", "$1" + table
-          + "$2");
+      source = source.replaceAll("(?is)(?
diff --git ql/src/java/org/apache/hadoop/hive/ql/QueryState.java ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
--- ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
-  public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
-    this.queryConf = createConf(conf, confOverlay, runAsync);
-  }
 
   /**
-   * If there are query specific settings to overlay, then create a copy of config
-   * There are two cases we need to clone the session config that's being passed to hive driver
-   * 1. Async query -
-   *    If the client changes a config setting, that shouldn't reflect in the execution already underway
-   * 2. confOverlay -
-   *    The query specific settings should only be applied to the query config and not session
-   * @return new configuration
+   * Private constructor, use QueryStateBuilder instead
+   * @param conf The query specific configuration object
    */
-  private HiveConf createConf(HiveConf conf,
-      Map<String, String> confOverlay,
-      boolean runAsync) {
-
-    if ( (confOverlay != null && !confOverlay.isEmpty()) ) {
-      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
-
-      // apply overlay query specific settings, if any
-      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
-        try {
-          conf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
-        } catch (IllegalArgumentException e) {
-          throw new RuntimeException("Error applying statement specific settings", e);
-        }
-      }
-    } else if (runAsync) {
-      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
-    }
-
-    if (conf == null) {
-      conf = new HiveConf();
-    }
-
-    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
-    return conf;
+  private QueryState(HiveConf conf) {
+    this.queryConf = conf;
   }
 
   public String getQueryId() {
@@ -109,4 +72,92 @@ public void setCommandType(HiveOperation commandType) {
   public HiveConf getConf() {
     return queryConf;
   }
+
+  /**
+   * Builder to instantiate the QueryState object.
+   */
+  public static class QueryStateBuilder {
+    private Map<String, String> confOverlay = null;
+    boolean runAsync = false;
+    boolean generateNewQueryId = false;
+
+    /**
+     * Default constructor - use this builder to create a QueryState object.
+     */
+    public QueryStateBuilder() {
+    }
+
+    /**
+     * Set this to true if the configuration should be detached from the original config. If not
+     * set, the default value is false.
+     * @param runAsync If the configuration should be detached
+     * @return The builder
+     */
+    public QueryStateBuilder setRunAsync(boolean runAsync) {
+      this.runAsync = runAsync;
+      return this;
+    }
+
+    /**
+     * Set this if there are specific configuration values which should be added to the original
+     * config. If at least one value is set, then the configuration will be detached from the
+     * original one.
+     * @param confOverlay The query specific parameters
+     * @return The builder
+     */
+    public QueryStateBuilder setConfOverlay(Map<String, String> confOverlay) {
+      this.confOverlay = confOverlay;
+      return this;
+    }
+
+    /**
+     * Set this to true if a new queryId should be generated, otherwise the original one will be
+     * kept. If not set, the default value is false.
+     * @param generateNewQueryId If a new queryId should be generated
+     * @return The builder
+     */
+    public QueryStateBuilder setGenerateNewQueryId(boolean generateNewQueryId) {
+      this.generateNewQueryId = generateNewQueryId;
+      return this;
+    }
+
+    /**
+     * Creates the QueryState object. The default values are:
+     * - runAsync: false
+     * - confOverlay: null
+     * - generateNewQueryId: false
+     * @param conf The configuration object we should use
+     * @return The generated QueryState object
+     */
+    public QueryState build(HiveConf conf) {
+      HiveConf queryConf = conf;
+
+      if (conf == null) {
+        // Generate a new conf if necessary
+        queryConf = new HiveConf();
+      } else if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) {
+        // Detach from the original conf if necessary
+        queryConf = new HiveConf(queryConf);
+      }
+
+      // Apply the overlay of query specific settings, if any
+      if (confOverlay != null && !confOverlay.isEmpty()) {
+        for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+          try {
+            queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
+          } catch (IllegalArgumentException e) {
+            throw new RuntimeException("Error applying statement specific settings", e);
+          }
+        }
+      }
+
+      // Generate the new queryId on the (possibly detached) query conf, not on the original one
+      if (generateNewQueryId) {
+        queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
+      }
+
+      return new QueryState(queryConf);
+    }
+  }
 }
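The hunk above is the heart of the patch. As a usage aside (not part of the patch itself), the sketch below shows how a caller is expected to drive the new builder; it mirrors the Operation.java hunk at the end of this diff. The class name QueryStateBuilderExample and the names sessionConf and overlay are illustrative only.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;

public class QueryStateBuilderExample {
  public static void main(String[] args) {
    HiveConf sessionConf = new HiveConf();

    // Plain replacement for the old "new QueryState(conf)": shares the caller's conf
    // and leaves its queryId untouched.
    QueryState simple = new QueryStateBuilder().build(sessionConf);

    // HiveServer2-style usage: apply query specific settings to a detached copy of
    // the session conf and generate a fresh queryId, as Operation does below.
    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("hive.fetch.task.conversion", "none");
    QueryState perQuery = new QueryStateBuilder()
        .setConfOverlay(overlay)
        .setRunAsync(true)
        .setGenerateNewQueryId(true)
        .build(sessionConf);

    // The overlay landed on the detached copy, not on the session conf.
    System.out.println(perQuery.getConf() != sessionConf); // prints true
    System.out.println(simple.getConf() == sessionConf);   // prints true
  }
}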
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 77bce97..762a2b6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -38,6 +38,7 @@
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -344,7 +345,8 @@ public static void main(String[] args) {
       }
     }
 
-    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
+    QueryState queryState =
+        new QueryStateBuilder().build(new HiveConf(conf, PartialScanTask.class));
     PartialScanWork mergeWork = new PartialScanWork(inputPaths);
     DriverContext driverCxt = new DriverContext();
     PartialScanTask taskExec = new PartialScanTask();
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
index b121eea..6ee4513 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
@@ -60,7 +61,6 @@
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -70,7 +70,6 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.joda.time.Interval;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -330,7 +329,7 @@ private static RelNode createTableScan(Table viewTable) {
   private static RelNode parseQuery(String viewQuery) {
     try {
       final ASTNode node = ParseUtils.parse(viewQuery);
-      final QueryState qs = new QueryState(SessionState.get().getConf());
+      final QueryState qs = new QueryStateBuilder().build(SessionState.get().getConf());
       CalcitePlanner analyzer = new CalcitePlanner(qs);
       analyzer.initCtx(new Context(SessionState.get().getConf()));
       analyzer.init(false);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index 3b719af..fb8a1e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -20,7 +20,6 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,22 +28,20 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -135,12 +132,13 @@ private Operator<? extends OperatorDesc> genSelOpForAnalyze(String analyzeCommand, Context origCtx) thro
     ASTNode tree = ParseUtils.parse(analyzeCommand, ctx);
 
     //1. get the ColumnStatsSemanticAnalyzer
-    BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), tree);
+    QueryState queryState = new QueryStateBuilder().build(conf);
+    BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(queryState, tree);
     ColumnStatsSemanticAnalyzer colSem = (ColumnStatsSemanticAnalyzer) baseSem;
 
     //2. get the rewritten AST
     ASTNode ast = colSem.rewriteAST(tree, this);
-    baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), ast);
+    baseSem = SemanticAnalyzerFactory.get(queryState, ast);
     SemanticAnalyzer sem = (SemanticAnalyzer) baseSem;
     QB qb = new QB(null, null, false);
     ASTNode child = ast;
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index c7266bc..cd2ff90 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -83,7 +84,7 @@
 
   static {
     try {
-      queryState = new QueryState(new HiveConf(ExecDriver.class));
+      queryState = new QueryStateBuilder().build(new HiveConf(ExecDriver.class));
       conf = queryState.getConf();
       conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
       conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index c734988..80a2be2 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -42,7 +43,7 @@
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     conf = queryState.getConf();
     SessionState.start(conf);
     context = new Context(conf);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index 201622e..ec8bf91 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
@@ -50,7 +51,7 @@
 
   @BeforeClass
   public static void init() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     conf = queryState.getConf();
     conf
       .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
index e607f10..d62be95 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -38,7 +39,7 @@
 
   @BeforeClass
   public static void initialize() {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState = new QueryStateBuilder().build(new HiveConf(SemanticAnalyzer.class));
     conf = queryState.getConf();
     SessionState.start(conf);
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 2674835..676cbbb 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Assert;
 import org.junit.Before;
@@ -48,7 +49,7 @@
 
   @BeforeClass
   public static void initialize() {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState = new QueryStateBuilder().build(new HiveConf(SemanticAnalyzer.class));
     conf = queryState.getConf();
     SessionState.start(conf);
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index 80865bd..7db382c 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
@@ -56,7 +57,7 @@
 
   @BeforeClass
   public static void initialize() throws HiveException {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState = new QueryStateBuilder().build(new HiveConf(SemanticAnalyzer.class));
     conf = queryState.getConf();
     conf.set("hive.security.authorization.manager", "");
     SessionState.start(conf);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
index 5849950..eba619e 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
@@ -22,6 +22,7 @@
 import org.antlr.runtime.CommonToken;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -32,7 +33,7 @@
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     conf = queryState.getConf();
   }
   @Test
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index a573808..dbbe81a 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
@@ -223,7 +224,7 @@ public void testInsertValuesPartitioned() throws Exception {
 
   @Before
   public void setup() {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     conf = queryState.getConf();
     conf
       .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 58cb4b4..647d827 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -99,7 +100,7 @@ public static void reset() {
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     HiveConf conf = queryState.getConf();
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
         TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index 5d01080..5ca3524 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -19,10 +19,9 @@
 
 import java.util.HashMap;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -41,7 +40,7 @@
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     db = Mockito.mock(Hive.class);
     table = new Table(DB, TABLE);
     partition = new Partition(table);
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
index c552ba7..6642764 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
@@ -21,6 +21,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -40,7 +41,7 @@
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryStateBuilder().build(null);
     //set authorization mode to V2
     HiveConf conf = queryState.getConf();
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
diff --git ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
new file mode 100644
index 0000000..7f8ae30
--- /dev/null
+++ ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
@@ -0,0 +1,318 @@
+PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_basetable
+PREHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@cmv_basetable
+POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.d EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+PREHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view
+POSTHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view
+PREHOOK: query: select * from cmv_mat_view
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+2 bob 3.14
+2 bonnie 172342.20
+PREHOOK: query: show tblproperties cmv_mat_view
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles 1
+totalSize 453
+#### A masked pattern was here ####
+PREHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view2
+PREHOOK: query: select * from cmv_mat_view2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 978.76
+3 9.80
+PREHOOK: query: show tblproperties cmv_mat_view2
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view2
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles 1
+totalSize 322
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: default.cmv_mat_view2
+          Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: a (type: int), c (type: decimal(10,2))
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 978.76
+3 9.80
+PREHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.cmv_mat_view2
+            Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: c (type: decimal(10,2))
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order:
+                Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: decimal(10,2))
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((d = 3) and (3 = a)) (type: boolean)
+              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0
+            1
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3 9.80 3 978.76
+3 978.76 3 978.76
+PREHOOK: query: drop materialized view cmv_mat_view2
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_mat_view2
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: drop materialized view cmv_mat_view2
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_mat_view2
+POSTHOOK: Output: default@cmv_mat_view2
+PREHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (a = 3) (type: boolean)
+              Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((d = 3) and (3 = a)) (type: boolean)
+              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0
+            1
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+3 9.80 3 978.76
+3 978.76 3 978.76
diff --git service/src/java/org/apache/hive/service/cli/operation/Operation.java service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 0b27608..3418179 100644
--- service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.QueryState.QueryStateBuilder;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.OperationLog;
 import org.apache.hive.service.cli.FetchOrientation;
@@ -101,7 +102,11 @@ protected Operation(HiveSession parentSession,
     currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX,
         MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
-    queryState = new QueryState(parentSession.getHiveConf(), confOverlay, isAsyncQueryState);
+    queryState = new QueryStateBuilder()
+        .setConfOverlay(confOverlay)
+        .setRunAsync(isAsyncQueryState)
+        .setGenerateNewQueryId(true)
+        .build(parentSession.getHiveConf());
   }
 
   public Future<?> getBackgroundHandle() {
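One behavioral consequence worth noting, inferred only from the hunks above: the removed createConf() stamped a fresh HIVEQUERYID on every QueryState it produced, while the builder renews the queryId only when setGenerateNewQueryId(true) is requested, which the Operation constructor now does explicitly. A before/after sketch, assuming a HiveConf named conf and placeholder overlay and async values:

// Before this patch: createConf() always generated a new queryId internally.
QueryState oldState = new QueryState(conf, overlay, async);

// After this patch: the queryId is renewed only on explicit request.
QueryState newState = new QueryStateBuilder()
    .setConfOverlay(overlay)
    .setRunAsync(async)
    .setGenerateNewQueryId(true)
    .build(conf);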