diff --git ql/src/java/org/apache/hadoop/hive/ql/Context.java ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 9183edf..ddd5802 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -783,6 +783,13 @@ public TokenRewriteStream getTokenRewriteStream() {
   }
 
   /**
+   * Obtain the unique executionId.
+   */
+  public String getExecutionId() {
+    return executionId;
+  }
+
+  /**
    * Generate a unique executionId. An executionId, together with user name and
    * the configuration, will determine the temporary locations of all intermediate
    * files.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 78c511b..eb1feaf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -32,18 +32,15 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.TransactionalValidationListener;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -284,22 +281,36 @@ private void cleanUpMetaColumnAccessControl() {
     }
   }
   /**
-   * Parse the newly generated SQL statment to get a new AST
+   * Parse the newly generated SQL statement to get a new AST
    */
-  private ReparseResult parseRewrittenQuery(StringBuilder rewrittenQueryStr, String originalQuery) throws SemanticException {
+  private ReparseResult parseRewrittenQuery(StringBuilder rewrittenQueryStr, String originalQuery)
+      throws SemanticException {
+    return parseRewrittenQuery(rewrittenQueryStr, null, originalQuery);
+  }
+  /**
+   * Parse the newly generated SQL statement to get a new AST
+   */
+  private ReparseResult parseRewrittenQuery(StringBuilder rewrittenQueryStr, String executionId, String originalQuery)
+      throws SemanticException {
+    // Set dynamic partitioning to nonstrict so that queries do not need any partition
+    // references.
+    // todo: this may be a perf issue as it prevents the optimizer.. or not
+    HiveConf.setVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
     // Parse the rewritten query string
     Context rewrittenCtx;
     try {
-      // Set dynamic partitioning to nonstrict so that queries do not need any partition
-      // references.
-      // todo: this may be a perf issue as it prevents the optimizer.. or not
-      HiveConf.setVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
-      rewrittenCtx = new Context(conf);
-      rewrittenCtx.setExplainConfig(ctx.getExplainConfig());
-      rewrittenCtx.setIsUpdateDeleteMerge(true);
+      if (executionId != null) {
+        // Reuse id for update / delete rewrite
+        rewrittenCtx = new Context(conf, executionId);
+      } else {
+        // Create new id for merge rewrite
+        rewrittenCtx = new Context(conf);
+      }
     } catch (IOException e) {
       throw new SemanticException(ErrorMsg.UPDATEDELETE_IO_ERROR.getMsg());
     }
+    rewrittenCtx.setExplainConfig(ctx.getExplainConfig());
+    rewrittenCtx.setIsUpdateDeleteMerge(true);
     rewrittenCtx.setCmd(rewrittenQueryStr.toString());
 
     ASTNode rewrittenTree;
@@ -402,7 +413,7 @@ private void reparseAndSuperAnalyze(ASTNode tree) throws SemanticException {
     // Add a sort by clause so that the row ids come out in the correct order
     rewrittenQueryStr.append(" sort by ROW__ID ");
 
-    ReparseResult rr = parseRewrittenQuery(rewrittenQueryStr, ctx.getCmd());
+    ReparseResult rr = parseRewrittenQuery(rewrittenQueryStr, ctx.getExecutionId(), ctx.getCmd());
     Context rewrittenCtx = rr.rewrittenCtx;
     ASTNode rewrittenTree = rr.rewrittenTree;
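
Reviewer note: below is a minimal standalone sketch of the id-reuse pattern the second hunk introduces. MiniContext and ScratchDirDemo are hypothetical stand-ins, not Hive's actual classes; the grounded facts from the patch are only that Context exposes getExecutionId(), offers a constructor taking an explicit executionId, and (per its javadoc) uses the executionId together with user name and configuration to determine temporary file locations. The update/delete rewrite passes ctx.getExecutionId() so the rewritten query resolves the same temporary location as the original context, while the merge rewrite still generates a fresh id.

import java.util.UUID;

// Hypothetical stand-in for org.apache.hadoop.hive.ql.Context; illustrative only.
class MiniContext {
  private final String executionId;

  // Mirrors "new Context(conf)": generate a fresh execution id.
  MiniContext() {
    this(UUID.randomUUID().toString());
  }

  // Mirrors "new Context(conf, executionId)": reuse a caller-supplied id.
  MiniContext(String executionId) {
    this.executionId = executionId;
  }

  String getExecutionId() {
    return executionId;
  }

  // The Context javadoc says the executionId, together with user name and the
  // configuration, determines the temporary locations of intermediate files;
  // this particular path scheme is invented for the demo.
  String scratchDir(String user) {
    return "/tmp/hive/" + user + "/" + executionId;
  }
}

public class ScratchDirDemo {
  public static void main(String[] args) {
    MiniContext original = new MiniContext();

    // Update/delete rewrite: reuse the original id, so both contexts resolve
    // the same temporary location.
    MiniContext updateRewrite = new MiniContext(original.getExecutionId());
    System.out.println(original.scratchDir("hive").equals(updateRewrite.scratchDir("hive"))); // true

    // Merge rewrite: a fresh id, hence a different temporary location.
    MiniContext mergeRewrite = new MiniContext();
    System.out.println(original.scratchDir("hive").equals(mergeRewrite.scratchDir("hive"))); // false
  }
}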