diff --git ql/src/java/org/apache/hadoop/hive/ql/Context.java ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 5e92980841..318c2071d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -121,7 +121,7 @@
   // Some statements, e.g., UPDATE, DELETE, or MERGE, get rewritten into different
   // subqueries that create new contexts. We keep them here so we can clean them
   // up when we are done.
-  private final Set<Context> rewrittenStatementContexts;
+  private final Set<Context> subContexts;
 
   // List of Locks for this query
   protected List<HiveLock> hiveLocks;
@@ -325,7 +325,7 @@ public Context(Configuration conf) throws IOException {
   private Context(Configuration conf, String executionId) {
     this.conf = conf;
     this.executionId = executionId;
-    this.rewrittenStatementContexts = new HashSet<>();
+    this.subContexts = new HashSet<>();
 
     // local & non-local tmp location is configurable. however it is the same across
     // all external file systems
@@ -343,7 +343,7 @@ protected Context(Context ctx) {
     // hence it needs to be used carefully. In particular, following objects
     // are ignored:
     // opContext, pathToCS, cboInfo, cboSucceeded, tokenRewriteStream, viewsTokenRewriteStreams,
-    // rewrittenStatementContexts, cteTables, loadTableOutputMap, planMapper, insertBranchToNamePrefix
+    // subContexts, cteTables, loadTableOutputMap, planMapper, insertBranchToNamePrefix
     this.isHDFSCleanup = ctx.isHDFSCleanup;
     this.resFile = ctx.resFile;
     this.resDir = ctx.resDir;
@@ -378,7 +378,7 @@ protected Context(Context ctx) {
     this.statsSource = ctx.statsSource;
     this.executionIndex = ctx.executionIndex;
     this.viewsTokenRewriteStreams = new HashMap<>();
-    this.rewrittenStatementContexts = new HashSet<>();
+    this.subContexts = new HashSet<>();
     this.opContext = new CompilationOpContext();
   }
 
@@ -857,7 +857,7 @@ public void clear() throws IOException{
 
   public void clear(boolean deleteResultDir) throws IOException {
     // First clear the other contexts created by this query
-    for (Context subContext : rewrittenStatementContexts) {
+    for (Context subContext : subContexts) {
       subContext.clear();
     }
     // Then clear this context
@@ -1057,8 +1057,8 @@ public void restoreOriginalTracker() {
     }
   }
 
-  public void addRewrittenStatementContext(Context context) {
-    rewrittenStatementContexts.add(context);
+  public void addSubContext(Context context) {
+    subContexts.add(context);
   }
 
   public void addCS(String path, ContentSummary cs) {
@@ -1132,7 +1132,7 @@ public String getCalcitePlan() {
       return this.calcitePlan;
     }
 
-    for (Context context : rewrittenStatementContexts) {
+    for (Context context : subContexts) {
       if (context.calcitePlan != null) {
         return context.calcitePlan;
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 517b0cc443..e70c92eef4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -683,7 +683,7 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command
         // and then, we acquire locks. If snapshot is still valid, we continue as usual.
         // But if snapshot is not valid, we recompile the query.
         driverContext.setRetrial(true);
-        driverContext.getBackupContext().addRewrittenStatementContext(context);
+        driverContext.getBackupContext().addSubContext(context);
         driverContext.getBackupContext().setHiveLocks(context.getHiveLocks());
         context = driverContext.getBackupContext();
         driverContext.getConf().set(ValidTxnList.VALID_TXNS_KEY,
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java
index d1d0e3cbe6..7ff2d1948c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java
@@ -94,7 +94,7 @@ private ASTNode getRewrittenAST(TableName tableName) throws SemanticException {
       String rewrittenInsertStatement = String.format(REWRITTEN_INSERT_STATEMENT,
           tableName.getEscapedNotEmptyDbTable(), viewText);
       rewrittenAST = ParseUtils.parse(rewrittenInsertStatement, ctx);
-      this.ctx.addRewrittenStatementContext(ctx);
+      this.ctx.addSubContext(ctx);
 
       if (!this.ctx.isExplainPlan() && AcidUtils.isTransactionalTable(table)) {
         // Acquire lock for the given materialized view. Only one rebuild per materialized view can be triggered at a
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index 997132fc06..d25cadf7ea 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -161,6 +161,7 @@ private Operator genSelOp(String command, boolean rewritten, Context origCtx)
       throws IOException, ParseException, SemanticException {
     // 1. initialization
     Context ctx = new Context(conf);
+    origCtx.addSubContext(ctx);
     ctx.setOpContext(origCtx.getOpContext());
     ctx.setExplainConfig(origCtx.getExplainConfig());
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index b8231d276f..2787b47b2e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -36,12 +36,10 @@
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -420,7 +418,7 @@ public void analyze(ASTNode ast, Context origCtx) throws SemanticException {
     analyzeRewrite.setColName(colNames);
     analyzeRewrite.setColType(colType);
     qbp.setAnalyzeRewrite(analyzeRewrite);
-    origCtx.addRewrittenStatementContext(ctx);
+    origCtx.addSubContext(ctx);
     initCtx(ctx);
     ctx.setExplainConfig(origCtx.getExplainConfig());
     LOG.info("Invoking analyze on rewritten query");
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index 58b2615660..5dac729679 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -534,7 +534,7 @@ private void reparseAndSuperAnalyze(Table table, URI fromURI) throws SemanticExc
       rewrittenCtx = new Context(conf);
       // We keep track of all the contexts that are created by this query
       // so we can clear them when we finish execution
-      ctx.addRewrittenStatementContext(rewrittenCtx);
+      ctx.addSubContext(rewrittenCtx);
     } catch (IOException e) {
       throw new SemanticException(ErrorMsg.LOAD_DATA_LAUNCH_JOB_IO_ERROR.getMsg());
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java
index 7b25030442..0b19f178b5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java
@@ -262,7 +262,7 @@ protected ReparseResult parseRewrittenQuery(StringBuilder rewrittenQueryStr, Str
       rewrittenCtx.setHDFSCleanup(true);
       // We keep track of all the contexts that are created by this query
       // so we can clear them when we finish execution
-      ctx.addRewrittenStatementContext(rewrittenCtx);
+      ctx.addSubContext(rewrittenCtx);
     } catch (IOException e) {
       throw new SemanticException(ErrorMsg.UPDATEDELETE_IO_ERROR.getMsg());
     }
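
Usage sketch (not part of the patch): a minimal, hypothetical caller illustrating the lifecycle the rename describes. Context(Configuration), addSubContext(Context), and clear() are taken from the diff above; the class name, the bare Configuration, and the setup are assumptions for illustration, not Hive code.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.Context;

public class SubContextLifecycleSketch {
  public static void main(String[] args) throws IOException {
    // Assumption: a plain Configuration suffices for the sketch; real callers
    // pass the session HiveConf.
    Configuration conf = new Configuration();

    // Context for the original statement.
    Context parentCtx = new Context(conf);

    // A rewritten statement (e.g. UPDATE/DELETE/MERGE, LOAD, or ANALYZE)
    // compiles against its own Context; registering it as a sub-context lets
    // the parent clean it up when the query finishes.
    Context rewrittenCtx = new Context(conf);
    parentCtx.addSubContext(rewrittenCtx);

    // Context.clear(boolean) first clears every registered sub-context, then
    // the parent context itself.
    parentCtx.clear();
  }
}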