diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index 2bcb56d..f467c81 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -121,8 +121,6 @@ public int processCmd(String cmd) { ss.updateThreadName(); - conf.set(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId()); - // Flush the print stream, so it doesn't include output from the last command ss.err.flush(); String cmd_trimmed = cmd.trim(); @@ -401,9 +399,6 @@ public void handle(Signal signal) { } ret = processCmd(command); - //wipe cli query state - SessionState ss = SessionState.get(); - ss.setCommandType(null); command = ""; lastRet = ret; boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS); diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index 1964410..9fa263d 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -26,6 +26,7 @@ import java.util.HashMap; import java.util.List; import java.util.Random; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -40,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -369,7 +371,8 @@ private Path createInputFile() throws IOException { * @throws Exception if any error occurs */ private List getTableData(String table, String database) throws Exception { - HiveConf conf = new HiveConf(); + QueryState queryState = new QueryState(null); + HiveConf conf = queryState.getConf(); conf.addResource("hive-site.xml"); ArrayList results = new ArrayList(); ArrayList temp = new ArrayList(); @@ -392,7 +395,7 @@ private Path createInputFile() throws IOException { } FetchTask task = new FetchTask(); task.setWork(work); - task.initialize(conf, null, null, new CompilationOpContext()); + task.initialize(queryState, null, null, new CompilationOpContext()); task.fetch(temp); for (String str : temp) { results.add(str.replace("\t", ",")); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 8473436..261972f 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -163,6 +163,7 @@ private static MiniClusterType clusterType = MiniClusterType.none; private ParseDriver pd; protected Hive db; + protected QueryState queryState; protected HiveConf conf; private Driver drv; private BaseSemanticAnalyzer sem; @@ -392,7 +393,8 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, if (useHBaseMetastore) { startMiniHBaseCluster(); } else { - conf = new HiveConf(Driver.class); + queryState = new QueryState(new HiveConf(Driver.class)); + conf = queryState.getConf(); } this.hadoopVer = getHadoopMainVersion(hadoopVer); qMap = 
new TreeMap(); @@ -922,7 +924,7 @@ public void init() throws Exception { drv = new Driver(conf); drv.init(); pd = new ParseDriver(); - sem = new SemanticAnalyzer(conf); + sem = new SemanticAnalyzer(queryState); } public void init(String tname) throws Exception { @@ -1648,7 +1650,7 @@ public ASTNode parseQuery(String tname) throws Exception { public void resetParser() throws SemanticException { drv.init(); pd = new ParseDriver(); - sem = new SemanticAnalyzer(conf); + sem = new SemanticAnalyzer(queryState); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 65ed1db..f58b665 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -164,6 +164,9 @@ // For WebUI. Kept alive after queryPlan is freed. private final QueryDisplay queryDisplay = new QueryDisplay(); + // Query specific info + private QueryState queryState; + private boolean checkConcurrency() { boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY); if (!supportConcurrency) { @@ -289,22 +292,28 @@ public void setMaxRows(int maxRows) { this.maxRows = maxRows; } + public Driver() { + this(new QueryState((SessionState.get() != null) ? + SessionState.get().getConf() : new HiveConf()), null); + } + /** * for backwards compatibility with current tests */ public Driver(HiveConf conf) { - this.conf = conf; - isParallelEnabled = (conf != null) - && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION); + this(new QueryState(conf), null); } public Driver(HiveConf conf, String userName) { - this(conf); - this.userName = userName; + this(new QueryState(conf), userName); } - public Driver() { - this((SessionState.get() != null) ? SessionState.get().getConf() : null); + public Driver(QueryState queryState, String userName) { + this.queryState = queryState; + this.conf = queryState.getConf(); + isParallelEnabled = (conf != null) + && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION); + this.userName = userName; } /** @@ -319,52 +328,6 @@ public int compile(String command) { } /** - * Hold state variables specific to each query being executed, that may not - * be consistent in the overall SessionState - */ - private static class QueryState { - private HiveOperation op; - private String cmd; - private boolean init = false; - - /** - * Initialize the queryState with the query state variables - */ - public void init(HiveOperation op, String cmd) { - this.op = op; - this.cmd = cmd; - this.init = true; - } - - public boolean isInitialized() { - return this.init; - } - - public HiveOperation getOp() { - return this.op; - } - - public String getCmd() { - return this.cmd; - } - } - - public void saveSession(QueryState qs) { - SessionState oldss = SessionState.get(); - if (oldss != null && oldss.getHiveOperation() != null) { - qs.init(oldss.getHiveOperation(), oldss.getCmd()); - } - } - - public void restoreSession(QueryState qs) { - SessionState ss = SessionState.get(); - if (ss != null && qs != null && qs.isInitialized()) { - ss.setCmd(qs.getCmd()); - ss.setCommandType(qs.getOp()); - } - } - - /** * Compile a new query, but potentially reset taskID counter. Not resetting task counter * is useful for generating re-entrant QL queries. * @param command The HiveQL query to compile @@ -391,9 +354,6 @@ public int compile(String command, boolean resetTaskIds) { LOG.warn("WARNING! Query command could not be redacted." 
+ e); } - //holder for parent command type/string when executing reentrant queries - QueryState queryState = new QueryState(); - if (ctx != null) { close(); } @@ -401,7 +361,6 @@ public int compile(String command, boolean resetTaskIds) { if (resetTaskIds) { TaskFactory.resetId(); } - saveSession(queryState); String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID); @@ -446,7 +405,7 @@ public void run() { perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE); - BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree); + BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree); List saHooks = getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, HiveSemanticAnalyzerHook.class); @@ -490,7 +449,7 @@ public void run() { schema = getSchema(sem, conf); plan = new QueryPlan(queryStr, sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId, - SessionState.get().getHiveOperation(), schema, queryDisplay); + queryState.getHiveOperation(), schema, queryDisplay); conf.setQueryString(queryStr); @@ -499,7 +458,7 @@ public void run() { // initialize FetchTask right here if (plan.getFetchTask() != null) { - plan.getFetchTask().initialize(conf, plan, null, ctx.getOpContext()); + plan.getFetchTask().initialize(queryState, plan, null, ctx.getOpContext()); } //do the authorization check @@ -508,7 +467,7 @@ public void run() { try { perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION); - doAuthorization(sem, command); + doAuthorization(queryState.getHiveOperation(), sem, command); } catch (AuthorizationException authExp) { console.printError("Authorization failed:" + authExp.getMessage() + ". Use SHOW GRANT to get more details."); @@ -562,7 +521,6 @@ public void run() { double duration = perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE)/1000.00; ImmutableMap compileHMSTimings = dumpMetaCallTimingWithoutEx("compilation"); queryDisplay.setHmsTimings(QueryDisplay.Phase.COMPILATION, compileHMSTimings); - restoreSession(queryState); LOG.info("Completed compiling command(queryId=" + queryId + "); Time taken: " + duration + " seconds"); } } @@ -589,7 +547,7 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan, ASTNode astTree) throws IOException { String ret = null; ExplainTask task = new ExplainTask(); - task.initialize(conf, plan, null, ctx.getOpContext()); + task.initialize(queryState, plan, null, ctx.getOpContext()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos); try { @@ -612,10 +570,9 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan, * @throws HiveException * @throws AuthorizationException */ - public static void doAuthorization(BaseSemanticAnalyzer sem, String command) + public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command) throws HiveException, AuthorizationException { SessionState ss = SessionState.get(); - HiveOperation op = ss.getHiveOperation(); Hive db = sem.getDb(); Set additionalInputs = new HashSet(); @@ -1519,7 +1476,7 @@ public int execute() throws CommandNeedRetryException { resStream = null; SessionState ss = SessionState.get(); - HookContext hookContext = new HookContext(plan, conf, ctx.getPathToCS(), ss.getUserName(), + HookContext hookContext = new HookContext(plan, queryState, ctx.getPathToCS(), ss.getUserName(), ss.getUserIpAddress(), operationId); hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK); @@ -1831,7 +1788,7 @@ private TaskRunner launchTask(Task tsk, String queryId, cxt.incCurJobNo(1); 
console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs); } - tsk.initialize(conf, plan, cxt, ctx.getOpContext()); + tsk.initialize(queryState, plan, cxt, ctx.getOpContext()); TaskResult tskRes = new TaskResult(); TaskRunner tskRun = new TaskRunner(tsk, tskRes); @@ -1921,7 +1878,7 @@ public void resetFetch() throws IOException { throw new IOException("Error closing the current fetch task", e); } // FetchTask should not depend on the plan. - fetchTask.initialize(conf, null, null, ctx.getOpContext()); + fetchTask.initialize(queryState, null, null, ctx.getOpContext()); } else { ctx.resetStream(); resStream = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java new file mode 100644 index 0000000..f50244b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java @@ -0,0 +1,108 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql; + +import java.sql.Timestamp; +import java.util.Map; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.HiveOperation; + +/** + * The class to store query level info such as queryId. Multiple queries can run + * in the same session, so SessionState is to hold common session related info, and + * each QueryState is to hold query related info. + * + */ +public class QueryState { + /** + * current configuration. + */ + private final HiveConf queryConf; + /** + * type of the command. + */ + private HiveOperation commandType; + + public QueryState(HiveConf conf) { + this(conf, null, false); + } + + public QueryState(HiveConf conf, Map confOverlay, boolean runAsync) { + this.queryConf = createConf(conf, confOverlay, runAsync); + } + + /** + * If there are query specific settings to overlay, then create a copy of config + * There are two cases we need to clone the session config that's being passed to hive driver + * 1. Async query - + * If the client changes a config setting, that shouldn't reflect in the execution already underway + * 2. confOverlay - + * The query specific settings should only be applied to the query config and not session + * @return new configuration + */ + private HiveConf createConf(HiveConf conf, + Map confOverlay, + boolean runAsync) { + HiveConf queryConf = (conf == null ? 
new HiveConf() : conf); + if (runAsync || (confOverlay != null && !confOverlay.isEmpty()) ) { + queryConf = new HiveConf(conf); + + // apply overlay query specific settings, if any + for (Map.Entry confEntry : confOverlay.entrySet()) { + try { + queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); + } catch (IllegalArgumentException e) { + throw new RuntimeException("Error applying statement specific settings", e); + } + } + } + + queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId()); + return queryConf; + } + + public String getQueryId() { + return (queryConf.getVar(HiveConf.ConfVars.HIVEQUERYID)); + } + + public String getQueryString() { + return queryConf.getQueryString(); + } + + public String getCommandType() { + if (commandType == null) { + return null; + } + return commandType.getOperationName(); + } + + public HiveOperation getHiveOperation() { + return commandType; + } + + public void setCommandType(HiveOperation commandType) { + this.commandType = commandType; + } + + public HiveConf getConf() { + return queryConf; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java index 9059928..05dfa3b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; @@ -83,9 +84,9 @@ public ColumnStatsTask() { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, ctx, opContext); + super.initialize(queryState, queryPlan, ctx, opContext); work.initializeForFetch(opContext); try { JobConf job = new JobConf(conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index bca8a6c..9a6e5c9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -27,7 +27,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -45,6 +44,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -68,9 +68,9 @@ .getLogger(ColumnStatsUpdateTask.class); @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, ctx, 
opContext); + super.initialize(queryState, queryPlan, ctx, opContext); } private ColumnStatistics constructColumnStatsFromInput() diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index b26f09d..d2c3ca8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -91,6 +91,7 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo; import org.apache.hadoop.hive.ql.exec.tez.TezTask; import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; @@ -259,9 +260,9 @@ public DDLTask() { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, ctx, opContext); + super.initialize(queryState, queryPlan, ctx, opContext); // Pick the formatter to use to display the results. Either the // normal human readable output or a json object. @@ -663,7 +664,7 @@ private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc, } // initialize the task and execute - task.initialize(db.getConf(), getQueryPlan(), driverCxt, opContext); + task.initialize(queryState, getQueryPlan(), driverCxt, opContext); int ret = task.execute(driverCxt); return ret; } @@ -4160,7 +4161,7 @@ private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws H truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); ColumnTruncateTask taskExec = new ColumnTruncateTask(); - taskExec.initialize(db.getConf(), null, driverCxt, null); + taskExec.initialize(queryState, null, driverCxt, null); taskExec.setWork(truncateWork); taskExec.setQueryPlan(this.getQueryPlan()); return taskExec.execute(driverCxt); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 4116141..c841f70 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -337,7 +337,7 @@ private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception { BaseSemanticAnalyzer analyzer = work.getAnalyzer(); - HiveOperation operation = SessionState.get().getHiveOperation(); + HiveOperation operation = queryState.getHiveOperation(); JSONObject object = new JSONObject(new LinkedHashMap<>()); Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work); @@ -374,7 +374,7 @@ public void exception(Exception exception) { SessionState.get().setActiveAuthorizer(authorizer); try { - Driver.doAuthorization(analyzer, ""); + Driver.doAuthorization(queryState.getHiveOperation(), analyzer, ""); } finally { SessionState.get().setActiveAuthorizer(delegate); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index 0b0c336..5473c15 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import 
org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.mr.ExecMapper; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.HiveInputFormat; @@ -61,9 +62,9 @@ public FetchTask() { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, ctx, opContext); + super.initialize(queryState, queryPlan, ctx, opContext); work.initializeForFetch(opContext); try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java index 1b971fc..42cdc84 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java @@ -25,6 +25,7 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; @@ -37,6 +38,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc; @@ -62,9 +64,9 @@ public FunctionTask() { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, ctx, opContext); + super.initialize(queryState, queryPlan, ctx, opContext); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java index 3199ee1..d5ae019 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -84,9 +85,9 @@ public StatsNoJobTask() { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); jc = new JobConf(conf); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 6c677f5..897af5e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -51,6 +51,7 @@ public transient HashMap taskCounters; public transient TaskHandle taskHandle; protected transient HiveConf conf; + protected transient QueryState queryState; protected transient LogHelper console; protected transient QueryPlan 
queryPlan; protected transient DriverContext driverContext; @@ -124,11 +125,12 @@ public TaskHandle getTaskHandle() { return taskHandle; } - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { this.queryPlan = queryPlan; setInitialized(); - this.conf = conf; + this.queryState = queryState; + this.conf = queryState.getConf(); this.driverContext = driverContext; console = new LogHelper(LOG); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index d164859..639b0da 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FetchOperator; import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner; import org.apache.hadoop.hive.ql.exec.Operator; @@ -124,7 +125,7 @@ public ExecDriver() { super(); console = new LogHelper(LOG); - this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this); + this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); } @Override @@ -142,9 +143,9 @@ private void initializeFiles(String prop, String files) { * Initialization when invoked from QL. */ @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, ExecDriver.class); @@ -168,7 +169,7 @@ public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverC HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives); } conf.stripHiddenConfigurations(job); - this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this); + this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); } /** @@ -178,7 +179,7 @@ public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveExc setWork(plan); this.job = job; console = new LogHelper(LOG, isSilent); - this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this); + this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index 1b296b9..760ba6c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.MapRedStats; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskHandle; @@ -75,6 +76,7 @@ public transient JobID jobId; private final LogHelper console; private final HadoopJobExecHook callBackObj; + private 
final QueryState queryState; /** * Update counters relevant to this task. @@ -135,8 +137,9 @@ public void setJobId(JobID jobId) { this.jobId = jobId; } - public HadoopJobExecHelper(JobConf job, LogHelper console, + public HadoopJobExecHelper(QueryState queryState, JobConf job, LogHelper console, Task task, HadoopJobExecHook hookCallBack) { + this.queryState = queryState; this.job = job; this.console = console; this.task = task; @@ -250,14 +253,14 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException { String logMapper; String logReducer; - + String queryId = queryState.getQueryId(); TaskReport[] mappers = jc.getMapTaskReports(rj.getID()); if (mappers == null) { logMapper = "no information for number of mappers; "; } else { numMap = mappers.length; if (ss != null) { - ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(), + ss.getHiveHistory().setTaskProperty(queryId, getId(), Keys.TASK_NUM_MAPPERS, Integer.toString(numMap)); } logMapper = "number of mappers: " + numMap + "; "; @@ -269,7 +272,7 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException { } else { numReduce = reducers.length; if (ss != null) { - ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(), + ss.getHiveHistory().setTaskProperty(queryId, getId(), Keys.TASK_NUM_REDUCERS, Integer.toString(numReduce)); } logReducer = "number of reducers: " + numReduce; @@ -355,11 +358,11 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException { String output = report.toString(); SessionState ss = SessionState.get(); if (ss != null) { - ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs); - ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(), + ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs); + ss.getHiveHistory().setTaskProperty(queryState.getQueryId(), getId(), Keys.TASK_HADOOP_PROGRESS, output); if (ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS)) { - ss.getHiveHistory().progressTask(SessionState.get().getQueryId(), this.task); + ss.getHiveHistory().progressTask(queryState.getQueryId(), this.task); this.callBackObj.logPlanProgress(ss); } } @@ -386,7 +389,7 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException { } else { SessionState ss = SessionState.get(); if (ss != null) { - ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs); + ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs); } success = rj.isSuccessful(); } @@ -430,7 +433,7 @@ public void jobInfo(RunningJob rj) { console.printInfo("Job running in-process (local Hadoop)"); } else { if (SessionState.get() != null) { - SessionState.get().getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), + SessionState.get().getHiveHistory().setTaskProperty(queryState.getQueryId(), getId(), Keys.TASK_HADOOP_ID, rj.getID().toString()); } console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java index f5500a4..a91dac2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import 
org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.BucketMatcher; import org.apache.hadoop.hive.ql.exec.FetchOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -120,13 +121,13 @@ public void setExecContext(ExecMapperContext execContext) { } @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, ExecDriver.class); execContext = new ExecMapperContext(job); //we don't use the HadoopJobExecHooks for local tasks - this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null); + this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, null); } public static String now() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java index 7f87adf..0b494aa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapOperator; @@ -71,9 +72,9 @@ private static final long serialVersionUID = 1L; @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); } @Override @@ -137,7 +138,7 @@ private void addToHistory(SparkJobRef jobRef) { console.printInfo("Starting Spark Job = " + jobRef.getJobId()); if (SessionState.get() != null) { SessionState.get().getHiveHistory() - .setQueryProperty(SessionState.get().getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId()); + .setQueryProperty(queryState.getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java index a1e35cb..0234fd9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java @@ -266,13 +266,9 @@ public void endQuery(String queryId) { @Override public void startTask(String queryId, Task task, String taskName) { - SessionState ss = SessionState.get(); - if (ss == null) { - return; - } TaskInfo ti = new TaskInfo(); - ti.hm.put(Keys.QUERY_ID.name(), ss.getQueryId()); + ti.hm.put(Keys.QUERY_ID.name(), queryId); ti.hm.put(Keys.TASK_ID.name(), task.getId()); ti.hm.put(Keys.TASK_NAME.name(), taskName); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java index f490161..7a7ad29 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java @@ -28,6 +28,7 @@ import 
org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ExplainTask; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -95,6 +96,7 @@ public void run() { public void run(final HookContext hookContext) throws Exception { final long currentTime = System.currentTimeMillis(); final HiveConf conf = new HiveConf(hookContext.getConf()); + final QueryState queryState = hookContext.getQueryState(); executor.submit(new Runnable() { @Override @@ -136,7 +138,7 @@ public void run() { ); @SuppressWarnings("unchecked") ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf); - explain.initialize(conf, plan, null, null); + explain.initialize(queryState, plan, null, null); String query = plan.getQueryStr(); JSONObject explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(SessionState.get().getSessionId()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java index bed17e9..357c49c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.TaskRunner; import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index; import org.apache.hadoop.hive.ql.session.SessionState; @@ -43,6 +44,7 @@ } private QueryPlan queryPlan; + private final QueryState queryState; private HiveConf conf; private List completeTaskList; private Set inputs; @@ -58,11 +60,12 @@ // TExecuteStatementResp.TOperationHandle.THandleIdentifier.guid private final String operationId; - public HookContext(QueryPlan queryPlan, HiveConf conf, + public HookContext(QueryPlan queryPlan, QueryState queryState, Map inputPathToContentSummary, String userName, String ipAddress, String operationId) throws Exception { this.queryPlan = queryPlan; - this.conf = conf; + this.queryState = queryState; + this.conf = queryState.getConf(); this.inputPathToContentSummary = inputPathToContentSummary; completeTaskList = new ArrayList(); inputs = queryPlan.getInputs(); @@ -174,4 +177,8 @@ public String getUserName() { public String getOperationId() { return operationId; } + + public QueryState getQueryState() { + return queryState; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java index 4518315..b4fc125 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.hooks.HookContext.HookType; import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; @@ -97,15 +98,14 @@ else if (o1.getKey().getDataContainer().isPartition() && @Override public void run(HookContext hookContext) throws Exception { assert(hookContext.getHookType() == HookType.POST_EXEC_HOOK); - 
SessionState ss = SessionState.get(); Set inputs = hookContext.getInputs(); Set outputs = hookContext.getOutputs(); LineageInfo linfo = hookContext.getLinfo(); UserGroupInformation ugi = hookContext.getUgi(); - this.run(ss,inputs,outputs,linfo,ugi); + this.run(hookContext.getQueryState(),inputs,outputs,linfo,ugi); } - public void run(SessionState sess, Set inputs, + public void run(QueryState queryState, Set inputs, Set outputs, LineageInfo linfo, UserGroupInformation ugi) throws Exception { @@ -115,9 +115,9 @@ public void run(SessionState sess, Set inputs, return; } - if (sess != null) { - console.printError("POSTHOOK: query: " + sess.getCmd().trim()); - console.printError("POSTHOOK: type: " + sess.getCommandType()); + if (queryState != null) { + console.printError("POSTHOOK: query: " + queryState.getQueryString().trim()); + console.printError("POSTHOOK: type: " + queryState.getCommandType()); } PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: "); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java index b5a26d8..232c62d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java @@ -24,6 +24,7 @@ import java.util.Set; import org.apache.hadoop.hive.common.io.FetchConverter; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.hooks.HookContext.HookType; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; @@ -40,8 +41,9 @@ public void run(HookContext hookContext) throws Exception { assert(hookContext.getHookType() == HookType.PRE_EXEC_HOOK); SessionState ss = SessionState.get(); + QueryState queryState = hookContext.getQueryState(); if (ss != null && ss.out instanceof FetchConverter) { - boolean foundQuery = ss.getHiveOperation() == HiveOperation.QUERY && + boolean foundQuery = queryState.getHiveOperation() == HiveOperation.QUERY && !hookContext.getQueryPlan().isForExplain(); ((FetchConverter)ss.out).foundQuery(foundQuery); } @@ -49,10 +51,10 @@ public void run(HookContext hookContext) throws Exception { Set inputs = hookContext.getInputs(); Set outputs = hookContext.getOutputs(); UserGroupInformation ugi = hookContext.getUgi(); - this.run(ss,inputs,outputs,ugi); + this.run(queryState,inputs,outputs,ugi); } - public void run(SessionState sess, Set inputs, + public void run(QueryState queryState, Set inputs, Set outputs, UserGroupInformation ugi) throws Exception { @@ -62,9 +64,9 @@ public void run(SessionState sess, Set inputs, return; } - if (sess != null) { - console.printError("PREHOOK: query: " + sess.getCmd().trim()); - console.printError("PREHOOK: type: " + sess.getCommandType()); + if (queryState != null) { + console.printError("PREHOOK: query: " + queryState.getQueryString().trim()); + console.printError("PREHOOK: type: " + queryState.getCommandType()); } printEntities(console, inputs, "PREHOOK: Input: "); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java index 807959e..29886ae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java @@ -26,8 +26,6 @@ import java.util.Map.Entry; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.StatsSetupConst; -import 
org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.ql.exec.Task; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java index 82629c1..6b0343b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -60,11 +61,11 @@ private boolean success = true; @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, MergeFileTask.class); - jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java index 71371a3..d31510d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; @@ -84,11 +85,11 @@ protected HadoopJobExecHelper jobExecHelper; @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, PartialScanTask.class); - jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); } @Override @@ -331,7 +332,6 @@ public static void main(String[] args) { if (jobConfFileName != null) { conf.addResource(new Path(jobConfFileName)); } - HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class); org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName()); boolean isSilent = HiveConf.getBoolVar(conf, @@ -348,11 +348,11 @@ public static void main(String[] args) { } } - + QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class)); PartialScanWork mergeWork = new PartialScanWork(inputPaths); DriverContext driverCxt = new DriverContext(); PartialScanTask taskExec = new PartialScanTask(); - taskExec.initialize(hiveConf, null, driverCxt, new CompilationOpContext()); + taskExec.initialize(queryState, null, 
driverCxt, new CompilationOpContext()); taskExec.setWork(mergeWork); int ret = taskExec.execute(driverCxt); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java index bc21da0..8acd6e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; @@ -59,11 +60,11 @@ protected HadoopJobExecHelper jobExecHelper; @Override - public void initialize(HiveConf conf, QueryPlan queryPlan, + public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { - super.initialize(conf, queryPlan, driverContext, opContext); + super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, ColumnTruncateTask.class); - jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java index 9c979be..669d56f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java @@ -204,7 +204,7 @@ private void handlePartialScanCommand(TableScanOperator op, GenMRProcContext ctx // partial scan task DriverContext driverCxt = new DriverContext(); Task psTask = TaskFactory.get(scanWork, parseCtx.getConf()); - psTask.initialize(parseCtx.getConf(), null, driverCxt, op.getCompilationOpContext()); + psTask.initialize(parseCtx.getQueryState(), null, driverCxt, op.getCompilationOpContext()); psTask.setWork(scanWork); // task dependency diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java index 64f9734..340d29a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java @@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; @@ -56,16 +57,16 @@ * @param command * @throws SemanticException */ - public static Operator generateOperatorTree(HiveConf conf, + public static Operator generateOperatorTree(QueryState queryState, String command) throws SemanticException { Operator operatorTree; try { - Context ctx = new Context(conf); + Context ctx = new Context(queryState.getConf()); ParseDriver pd = new ParseDriver(); ASTNode tree = pd.parse(command, ctx); tree = ParseUtils.findRootNonNullToken(tree); - BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree); + 
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree); assert(sem instanceof SemanticAnalyzer); operatorTree = doSemanticAnalysis((SemanticAnalyzer) sem, tree, ctx); LOG.info("Sub-query Semantic Analysis Completed"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java index 74bedcb..3d11907 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java @@ -268,7 +268,7 @@ private void replaceGroupByOperatorProcess(GroupByOperator operator, int index) + rewriteQueryCtx.getIndexKey() + " "; // retrieve the operator tree for the query, and the required GroupByOperator from it Operator newOperatorTree = RewriteParseContextGenerator.generateOperatorTree( - rewriteQueryCtx.getParseContext().getConf(), + rewriteQueryCtx.getParseContext().getQueryState(), selReplacementCommand); // we get our new GroupByOperator here diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f6ba521..e967c93 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -87,6 +88,7 @@ // Assumes one instance of this + single-threaded compilation for each query. 
protected final Hive db; protected final HiveConf conf; + protected final QueryState queryState; protected List> rootTasks; protected FetchTask fetchTask; protected final Logger LOG; @@ -199,13 +201,14 @@ protected void analyzeRowFormat(ASTNode child) throws SemanticException { } } - public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException { - this(conf, createHiveDB(conf)); + public BaseSemanticAnalyzer(QueryState queryState) throws SemanticException { + this(queryState, createHiveDB(queryState.getConf())); } - public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException { + public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException { try { - this.conf = conf; + this.queryState = queryState; + this.conf = queryState.getConf(); this.db = db; rootTasks = new ArrayList>(); LOG = LoggerFactory.getLogger(this.getClass().getName()); @@ -1504,4 +1507,7 @@ protected String toMessage(ErrorMsg message, Object detail) { public HashSet getAllOutputs() { return outputs; } + public QueryState getQueryState() { + return queryState; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index b59347d..59c1335 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -104,6 +104,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -217,8 +218,8 @@ private boolean disableSemJoinReordering = true; private EnumSet profilesCBO; - public CalcitePlanner(HiveConf conf) throws SemanticException { - super(conf); + public CalcitePlanner(QueryState queryState) throws SemanticException { + super(queryState); if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) { runCBO = false; disableSemJoinReordering = false; @@ -514,18 +515,18 @@ Table materializeCTE(String cteName, CTEClause cte) throws HiveException { createTable.addChild(temporary); createTable.addChild(cte.cteNode); - CalcitePlanner analyzer = new CalcitePlanner(conf); + CalcitePlanner analyzer = new CalcitePlanner(queryState); analyzer.initCtx(ctx); analyzer.init(false); // should share cte contexts analyzer.aliasToCTEs.putAll(aliasToCTEs); - HiveOperation operation = SessionState.get().getHiveOperation(); + HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable); } finally { - SessionState.get().setCommandType(operation); + queryState.setCommandType(operation); } Table table = analyzer.tableDesc.toTable(conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index bb1bbad..3b6cbce 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -25,15 +25,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.HiveStatsUtils; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import 
org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; @@ -62,8 +61,8 @@ private List colType; private Table tbl; - public ColumnStatsSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public ColumnStatsSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } private boolean shouldRewrite(ASTNode tree) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index fe9b8cc..b704a7b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask; import org.apache.hadoop.hive.ql.exec.FetchTask; @@ -229,12 +230,12 @@ public static String getTypeName(ASTNode node) throws SemanticException { return typeName; } - public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException { - this(conf, createHiveDB(conf)); + public DDLSemanticAnalyzer(QueryState queryState) throws SemanticException { + this(queryState, createHiveDB(queryState.getConf())); } - public DDLSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException { - super(conf, db); + public DDLSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException { + super(queryState, db); reservedPartitionValues = new HashSet(); // Partition can't have this name reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME)); @@ -1350,9 +1351,9 @@ private void analyzeAlterTableProps(String[] qualified, HashMap HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) .getChild(0)); EnvironmentContext environmentContext = null; - if (SessionState.get().getCommandType() + if (queryState.getCommandType() .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName()) - || SessionState.get().getCommandType() + || queryState.getCommandType() .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) { // we need to check if the properties are valid, especially for stats. 
boolean changeStatsSucceeded = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java index 6f0f3a6..8d7fd92 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java @@ -19,8 +19,8 @@ import java.util.List; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ExplainSQRewriteTask; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork; @@ -28,8 +28,8 @@ public class ExplainSQRewriteSemanticAnalyzer extends BaseSemanticAnalyzer { List fieldList; - public ExplainSQRewriteSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public ExplainSQRewriteSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @SuppressWarnings("unchecked") @@ -42,7 +42,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { // Create a semantic analyzer for the query ASTNode input = (ASTNode) ast.getChild(0); SemanticAnalyzer sem = (SemanticAnalyzer) - SemanticAnalyzerFactory.get(conf, input); + SemanticAnalyzerFactory.get(queryState, input); sem.analyze(input, ctx); sem.validate(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java index e393be2..0c4238b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.ExplainTask; import org.apache.hadoop.hive.ql.exec.Task; @@ -37,8 +38,8 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer { List fieldList; - public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public ExplainSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @SuppressWarnings("unchecked") @@ -70,7 +71,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { // Create a semantic analyzer for the query ASTNode input = (ASTNode) ast.getChild(0); - BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, input); + BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, input); sem.analyze(input, ctx); sem.validate(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java index fe8147a..475f2c9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; @@ -47,8 +48,8 @@ 
private ReplicationSpec replicationSpec; - public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public ExportSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index be908d3..1ec45ee 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.FunctionUtils; @@ -49,8 +50,8 @@ private static final Logger LOG = LoggerFactory .getLogger(FunctionSemanticAnalyzer.class); - public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public FunctionSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index 549d24f..a9bc271 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -74,8 +75,8 @@ public static final String METADATA_NAME="_metadata"; - public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public ImportSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } private boolean tableExists = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java index b90616f..a49b813 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -58,8 +59,8 @@ */ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer { - public LoadSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public LoadSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } public static FileStatus[] matchFilesOrDir(FileSystem fs, Path path) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java index e394914..fe065f8 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.FunctionUtils; @@ -44,13 +45,10 @@ import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.PreOrderWalker; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.CreateMacroDesc; import org.apache.hadoop.hive.ql.plan.DropMacroDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FunctionWork; -import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @@ -62,8 +60,8 @@ private static final Logger LOG = LoggerFactory .getLogger(MacroSemanticAnalyzer.class); - public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public MacroSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @Override @@ -132,8 +130,8 @@ public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) throw new SemanticException("At least one parameter name was used more than once " + macroColNames); } - SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner( - conf) : new SemanticAnalyzer(conf); + SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? 
+ new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState); ; ExprNodeDesc body; if(isNoArgumentMacro) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java index 1bccf20..5d0f905 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.QueryProperties; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -78,6 +79,7 @@ private List loadTableWork; private List loadFileWork; private Context ctx; + private QueryState queryState; private HiveConf conf; private HashMap idToTableNameMap; private int destTableId; @@ -153,7 +155,7 @@ public ParseContext() { * @param rootTasks */ public ParseContext( - HiveConf conf, + QueryState queryState, HashMap opToPartPruner, HashMap opToPartList, HashMap topOps, @@ -173,7 +175,8 @@ public ParseContext( List reduceSinkOperatorsAddedByEnforceBucketingSorting, AnalyzeRewriteContext analyzeRewrite, CreateTableDesc createTableDesc, QueryProperties queryProperties, Map viewProjectToTableSchema) { - this.conf = conf; + this.queryState = queryState; + this.conf = queryState.getConf(); this.opToPartPruner = opToPartPruner; this.opToPartList = opToPartList; this.joinOps = joinOps; @@ -241,6 +244,13 @@ public void setConf(HiveConf conf) { } /** + * @return the query state + */ + public QueryState getQueryState() { + return queryState; + } + + /** * @return the opToPartPruner */ public HashMap getOpToPartPruner() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java index 93b7a66..c13a404 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java @@ -190,7 +190,7 @@ private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext // partial scan task DriverContext driverCxt = new DriverContext(); Task partialScanTask = TaskFactory.get(scanWork, parseContext.getConf()); - partialScanTask.initialize(parseContext.getConf(), null, driverCxt, + partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt, tableScan.getCompilationOpContext()); partialScanTask.setWork(scanWork); statsWork.setSourceTask(partialScanTask); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 987f25d..5b69c75 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -72,6 +72,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -338,8 +339,8 @@ int nextNum; } - public SemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); +
super(queryState); opToPartPruner = new HashMap(); opToPartList = new HashMap(); opToSamplePruner = new HashMap(); @@ -440,7 +441,7 @@ public void initParseCtx(ParseContext pctx) { public ParseContext getParseContext() { // Make sure the basic query properties are initialized copyInfoToQueryProperties(queryProperties); - return new ParseContext(conf, opToPartPruner, opToPartList, topOps, + return new ParseContext(queryState, opToPartPruner, opToPartList, topOps, new HashSet(joinContext.keySet()), new HashSet(smbMapJoinContext.keySet()), loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, @@ -1190,18 +1191,18 @@ Table materializeCTE(String cteName, CTEClause cte) throws HiveException { createTable.addChild(temporary); createTable.addChild(cte.cteNode); - SemanticAnalyzer analyzer = new SemanticAnalyzer(conf); + SemanticAnalyzer analyzer = new SemanticAnalyzer(queryState); analyzer.initCtx(ctx); analyzer.init(false); // should share cte contexts analyzer.aliasToCTEs.putAll(aliasToCTEs); - HiveOperation operation = SessionState.get().getHiveOperation(); + HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable); } finally { - SessionState.get().setCommandType(operation); + queryState.setCommandType(operation); } Table table = analyzer.tableDesc.toTable(conf); @@ -6949,7 +6950,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) if (ltd != null && SessionState.get() != null) { SessionState.get().getLineageState() .mapDirToFop(ltd.getSourcePath(), (FileSinkOperator) output); - } else if ( SessionState.get().getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) { + } else if ( queryState.getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) { Path tlocation = null; String tName = Utilities.getDbTableName(tableDesc.getTableName())[1]; @@ -9312,7 +9313,7 @@ private Operator genPostGroupByBodyPlan(Operator curr, String dest, QB qb, limit.intValue(), extraMRStep); qb.getParseInfo().setOuterQueryLimit(limit.intValue()); } - if (!SessionState.get().getHiveOperation().equals(HiveOperation.CREATEVIEW)) { + if (!queryState.getHiveOperation().equals(HiveOperation.CREATEVIEW)) { curr = genFileSinkPlan(dest, qb, curr); } } @@ -10317,7 +10318,7 @@ boolean analyzeCreateTable(ASTNode child) throws SemanticException { return true; } } else { - SessionState.get().setCommandType(HiveOperation.QUERY); + queryState.setCommandType(HiveOperation.QUERY); } return false; @@ -10513,14 +10514,14 @@ boolean genResolvedParseTree(ASTNode ast, PlannerContext plannerCtx) throws Sema return false; } } else { - SessionState.get().setCommandType(HiveOperation.QUERY); + queryState.setCommandType(HiveOperation.QUERY); } // 3. 
analyze create view command if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW || (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) { child = analyzeCreateView(ast, qb); - SessionState.get().setCommandType(HiveOperation.CREATEVIEW); + queryState.setCommandType(HiveOperation.CREATEVIEW); if (child == null) { return false; } @@ -10549,7 +10550,7 @@ else if(ast.getChild(0).getType() == HiveParser.TOK_FALSE) { throw new IllegalStateException(SemanticAnalyzerFactory.getOperation(ast.getToken().getType()) + " is not supported yet."); } - SessionState.get().setCommandType(SemanticAnalyzerFactory.getOperation(ast.getToken().getType())); + queryState.setCommandType(SemanticAnalyzerFactory.getOperation(ast.getToken().getType())); return false; } @@ -10647,7 +10648,7 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce // 4. Generate Parse Context for Optimizer & Physical compiler copyInfoToQueryProperties(queryProperties); - ParseContext pCtx = new ParseContext(conf, opToPartPruner, opToPartList, topOps, + ParseContext pCtx = new ParseContext(queryState, opToPartPruner, opToPartList, topOps, new HashSet(joinContext.keySet()), new HashSet(smbMapJoinContext.keySet()), loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx, @@ -10727,7 +10728,7 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce // TEZ..) if (!ctx.getExplainLogical()) { TaskCompiler compiler = TaskCompilerFactory.getCompiler(conf, pCtx); - compiler.init(conf, console, db); + compiler.init(queryState, console, db); compiler.compile(pCtx, rootTasks, inputs, outputs); fetchTask = pCtx.getFetchTask(); } @@ -11492,7 +11493,7 @@ ASTNode analyzeCreateTable( crtTblDesc.validate(conf); // outputs is empty, which means this create table happens in the current // database. 
- SessionState.get().setCommandType(HiveOperation.CREATETABLE); + queryState.setCommandType(HiveOperation.CREATETABLE); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc), conf)); break; @@ -11511,7 +11512,7 @@ ASTNode analyzeCreateTable( storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat); - SessionState.get().setCommandType(HiveOperation.CREATETABLE); + queryState.setCommandType(HiveOperation.CREATETABLE); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc), conf)); break; @@ -11585,7 +11586,7 @@ ASTNode analyzeCreateTable( tableDesc.setNullFormat(rowFormatParams.nullFormat); qb.setTableDesc(tableDesc); - SessionState.get().setCommandType(HiveOperation.CREATETABLE_AS_SELECT); + queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT); return selectStmt; default: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 07ca409..fb8a33c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -20,6 +20,8 @@ import org.antlr.runtime.tree.Tree; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; @@ -158,24 +160,24 @@ HiveOperation.ALTERTABLE_UPDATEPARTSTATS}); } - public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) + public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) throws SemanticException { if (tree.getToken() == null) { throw new RuntimeException("Empty Syntax Tree"); } else { - setSessionCommandType(commandType.get(tree.getType())); - + HiveOperation opType = commandType.get(tree.getType()); + queryState.setCommandType(opType); switch (tree.getType()) { case HiveParser.TOK_EXPLAIN: - return new ExplainSemanticAnalyzer(conf); + return new ExplainSemanticAnalyzer(queryState); case HiveParser.TOK_EXPLAIN_SQ_REWRITE: - return new ExplainSQRewriteSemanticAnalyzer(conf); + return new ExplainSQRewriteSemanticAnalyzer(queryState); case HiveParser.TOK_LOAD: - return new LoadSemanticAnalyzer(conf); + return new LoadSemanticAnalyzer(queryState); case HiveParser.TOK_EXPORT: - return new ExportSemanticAnalyzer(conf); + return new ExportSemanticAnalyzer(queryState); case HiveParser.TOK_IMPORT: - return new ImportSemanticAnalyzer(conf); + return new ImportSemanticAnalyzer(queryState); case HiveParser.TOK_ALTERTABLE: { Tree child = tree.getChild(1); switch (child.getType()) { @@ -193,13 +195,13 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES: case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION: case HiveParser.TOK_ALTERTABLE_SKEWED: - setSessionCommandType(commandType.get(child.getType())); - return new DDLSemanticAnalyzer(conf); + queryState.setCommandType(commandType.get(child.getType())); + return new DDLSemanticAnalyzer(queryState); } - HiveOperation commandType = + opType = tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 
1 : 0]; - setSessionCommandType(commandType); - return new DDLSemanticAnalyzer(conf); + queryState.setCommandType(opType); + return new DDLSemanticAnalyzer(queryState); } case HiveParser.TOK_ALTERVIEW: { Tree child = tree.getChild(1); @@ -209,13 +211,14 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_ALTERVIEW_ADDPARTS: case HiveParser.TOK_ALTERVIEW_DROPPARTS: case HiveParser.TOK_ALTERVIEW_RENAME: - setSessionCommandType(commandType.get(child.getType())); - return new DDLSemanticAnalyzer(conf); + opType = commandType.get(child.getType()); + queryState.setCommandType(opType); + return new DDLSemanticAnalyzer(queryState); } // TOK_ALTERVIEW_AS assert child.getType() == HiveParser.TOK_QUERY; - setSessionCommandType(HiveOperation.ALTERVIEW_AS); - return new SemanticAnalyzer(conf); + queryState.setCommandType(HiveOperation.ALTERVIEW_AS); + return new SemanticAnalyzer(queryState); } case HiveParser.TOK_CREATEDATABASE: case HiveParser.TOK_DROPDATABASE: @@ -265,23 +268,23 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_TRUNCATETABLE: case HiveParser.TOK_SHOW_SET_ROLE: case HiveParser.TOK_CACHE_METADATA: - return new DDLSemanticAnalyzer(conf); + return new DDLSemanticAnalyzer(queryState); case HiveParser.TOK_CREATEFUNCTION: case HiveParser.TOK_DROPFUNCTION: case HiveParser.TOK_RELOADFUNCTION: - return new FunctionSemanticAnalyzer(conf); + return new FunctionSemanticAnalyzer(queryState); case HiveParser.TOK_ANALYZE: - return new ColumnStatsSemanticAnalyzer(conf); + return new ColumnStatsSemanticAnalyzer(queryState); case HiveParser.TOK_CREATEMACRO: case HiveParser.TOK_DROPMACRO: - return new MacroSemanticAnalyzer(conf); + return new MacroSemanticAnalyzer(queryState); case HiveParser.TOK_UPDATE_TABLE: case HiveParser.TOK_DELETE_FROM: - return new UpdateDeleteSemanticAnalyzer(conf); + return new UpdateDeleteSemanticAnalyzer(queryState); case HiveParser.TOK_START_TRANSACTION: case HiveParser.TOK_COMMIT: @@ -289,20 +292,14 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_SET_AUTOCOMMIT: default: { SemanticAnalyzer semAnalyzer = HiveConf - .getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner(conf) - : new SemanticAnalyzer(conf); + .getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED) ? + new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState); return semAnalyzer; } } } } - private static void setSessionCommandType(HiveOperation commandType) { - if (SessionState.get() != null) { - SessionState.get().setCommandType(commandType); - } - } - private SemanticAnalyzerFactory() { // prevent instantiation } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index f7d7a40..21233d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ColumnStatsTask; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.StatsTask; @@ -74,10 +75,12 @@ // Assumes one instance of this + single-threaded compilation for each query. 
protected Hive db; protected LogHelper console; + protected QueryState queryState; protected HiveConf conf; - public void init(HiveConf conf, LogHelper console, Hive db) { - this.conf = conf; + public void init(QueryState queryState, LogHelper console, Hive db) { + this.queryState = queryState; + this.conf = queryState.getConf(); this.db = db; this.console = console; } @@ -393,7 +396,7 @@ protected abstract void generateTaskTree(List> root * Create a clone of the parse context */ public ParseContext getParseContext(ParseContext pCtx, List> rootTasks) { - ParseContext clone = new ParseContext(conf, + ParseContext clone = new ParseContext(queryState, pCtx.getOpToPartPruner(), pCtx.getOpToPartList(), pCtx.getTopOps(), pCtx.getJoinOps(), pCtx.getSmbMapJoinOps(), pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java index ff971ac..2cfa6f8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.ConditionalTask; @@ -101,8 +102,8 @@ public TezCompiler() { } @Override - public void init(HiveConf conf, LogHelper console, Hive db) { - super.init(conf, console, db); + public void init(QueryState queryState, LogHelper console, Hive db) { + super.init(queryState, console, db); // Tez requires us to use RPC for the query plan HiveConf.setBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN, true); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java index 5b4365c..b8771d2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.hooks.Entity; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; @@ -52,8 +53,8 @@ boolean useSuper = false; - public UpdateDeleteSemanticAnalyzer(HiveConf conf) throws SemanticException { - super(conf); + public UpdateDeleteSemanticAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java index 80ccb28..52186b4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java @@ -187,7 +187,7 @@ private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext @SuppressWarnings("unchecked") Task partialScanTask = TaskFactory.get(scanWork, parseContext.getConf()); - partialScanTask.initialize(parseContext.getConf(), null, driverCxt, + 
partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt, tableScan.getCompilationOpContext()); partialScanTask.setWork(scanWork); statsWork.setSourceTask(partialScanTask); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 23b8a96..a7350fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -118,7 +118,7 @@ /** * current configuration. */ - private final HiveConf conf; + private final HiveConf sessionConf; /** * silent mode. @@ -131,7 +131,7 @@ protected boolean isVerbose; /** - * Is the query served from HiveServer2 + * The flag to indicate if the session serves the queries from HiveServer2 or not. */ private boolean isHiveServerQuery = false; @@ -167,11 +167,6 @@ */ protected File tmpErrOutputFile; - /** - * type of the command. - */ - private HiveOperation commandType; - private String lastCommand; private HiveAuthorizationProvider authorizer; @@ -279,7 +274,7 @@ public LineageState getLineageState() { } public HiveConf getConf() { - return conf; + return sessionConf; } @@ -308,8 +303,8 @@ public void deleteTmpErrOutputFile() { } public boolean getIsSilent() { - if(conf != null) { - return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT); + if(sessionConf != null) { + return sessionConf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT); } else { return isSilent; } @@ -320,8 +315,8 @@ public boolean isHiveServerQuery() { } public void setIsSilent(boolean isSilent) { - if(conf != null) { - conf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent); + if(sessionConf != null) { + sessionConf.setBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT, isSilent); } this.isSilent = isSilent; } @@ -347,7 +342,7 @@ public SessionState(HiveConf conf) { } public SessionState(HiveConf conf, String userName) { - this.conf = conf; + this.sessionConf = conf; this.userName = userName; if (LOG.isDebugEnabled()) { LOG.debug("SessionState user: " + userName); @@ -367,23 +362,11 @@ public SessionState(HiveConf conf, String userName) { parentLoader = SessionState.class.getClassLoader(); // Make sure that each session has its own UDFClassloader. 
For details see {@link UDFClassLoader} final ClassLoader currentLoader = Utilities.createUDFClassLoader((URLClassLoader) parentLoader, new String[]{}); - this.conf.setClassLoader(currentLoader); + this.sessionConf.setClassLoader(currentLoader); resourceDownloader = new ResourceDownloader(conf, HiveConf.getVar(conf, ConfVars.DOWNLOADED_RESOURCES_DIR)); } - public void setCmd(String cmdString) { - conf.setQueryString(cmdString); - } - - public String getCmd() { - return (conf.getQueryString()); - } - - public String getQueryId() { - return (conf.getVar(HiveConf.ConfVars.HIVEQUERYID)); - } - public Map getHiveVariables() { if (hiveVariables == null) { hiveVariables = new HashMap(); @@ -396,7 +379,7 @@ public void setHiveVariables(Map hiveVariables) { } public String getSessionId() { - return (conf.getVar(HiveConf.ConfVars.HIVESESSIONID)); + return (sessionConf.getVar(HiveConf.ConfVars.HIVESESSIONID)); } public void updateThreadName() { @@ -449,9 +432,9 @@ public HiveTxnManager getTxnMgr() { public HadoopShims.HdfsEncryptionShim getHdfsEncryptionShim() throws HiveException { if (hdfsEncryptionShim == null) { try { - FileSystem fs = FileSystem.get(conf); + FileSystem fs = FileSystem.get(sessionConf); if ("hdfs".equals(fs.getUri().getScheme())) { - hdfsEncryptionShim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf); + hdfsEncryptionShim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, sessionConf); } else { LOG.info("Could not get hdfsEncryptionShim, it is only applicable to hdfs filesystem."); } @@ -549,9 +532,9 @@ private static void start(SessionState startSs, boolean isAsync, LogHelper conso // Hive object instance should be created with a copy of the conf object. If the conf is // shared with SessionState, other parts of the code might update the config, but // Hive.get(HiveConf) would not recognize the case when it needs refreshing - Hive.get(new HiveConf(startSs.conf)).getMSC(); + Hive.get(new HiveConf(startSs.sessionConf)).getMSC(); UserGroupInformation sessionUGI = Utils.getUGI(); - FileSystem.get(startSs.conf); + FileSystem.get(startSs.sessionConf); // Create scratch dirs for this session startSs.createSessionDirs(sessionUGI.getShortUserName()); @@ -603,9 +586,9 @@ private static void start(SessionState startSs, boolean isAsync, LogHelper conso } // Neither open nor opening. 
if (!isAsync) { - startSs.tezSessionState.open(startSs.conf); // should use conf on session start-up + startSs.tezSessionState.open(startSs.sessionConf); // should use conf on session start-up } else { - startSs.tezSessionState.beginOpen(startSs.conf, null, console); + startSs.tezSessionState.beginOpen(startSs.sessionConf, null, console); } } catch (Exception e) { throw new RuntimeException(e); @@ -700,7 +683,7 @@ private Path createRootHDFSDir(HiveConf conf) throws IOException { * @return * @throws IOException */ - private void createPath(HiveConf conf, Path path, String permission, boolean isLocal, + private static void createPath(HiveConf conf, Path path, String permission, boolean isLocal, boolean isCleanUp) throws IOException { FsPermission fsPermission = new FsPermission(permission); FileSystem fs; @@ -790,18 +773,18 @@ private void setupAuth() { } try { - authenticator = HiveUtils.getAuthenticator(conf, + authenticator = HiveUtils.getAuthenticator(sessionConf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); authenticator.setSessionState(this); - String clsStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); - authorizer = HiveUtils.getAuthorizeProviderManager(conf, + String clsStr = HiveConf.getVar(sessionConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + authorizer = HiveUtils.getAuthorizeProviderManager(sessionConf, clsStr, authenticator, true); if (authorizer == null) { // if it was null, the new (V2) authorization plugin must be specified in // config - HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(conf, + HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(sessionConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); @@ -810,12 +793,12 @@ private void setupAuth() { authzContextBuilder.setSessionString(getSessionId()); authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), - conf, authenticator, authzContextBuilder.build()); + sessionConf, authenticator, authzContextBuilder.build()); setAuthorizerV2Config(); } // create the create table grants with new config - createTableGrants = CreateTableAutomaticGrant.create(conf); + createTableGrants = CreateTableAutomaticGrant.create(sessionConf); } catch (HiveException e) { LOG.error("Error setting up authorization: " + e.getMessage(), e); @@ -831,23 +814,23 @@ private void setupAuth() { private void setAuthorizerV2Config() throws HiveException { // avoid processing the same config multiple times, check marker - if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { + if (sessionConf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { return; } - String metastoreHook = conf.get(ConfVars.METASTORE_FILTER_HOOK.name()); + String metastoreHook = sessionConf.get(ConfVars.METASTORE_FILTER_HOOK.name()); if (!ConfVars.METASTORE_FILTER_HOOK.getDefaultValue().equals(metastoreHook) && !AuthorizationMetaStoreFilterHook.class.getName().equals(metastoreHook)) { LOG.warn(ConfVars.METASTORE_FILTER_HOOK.name() + " will be ignored, since hive.security.authorization.manager" + " is set to instance of HiveAuthorizerFactory."); } - conf.setVar(ConfVars.METASTORE_FILTER_HOOK, + sessionConf.setVar(ConfVars.METASTORE_FILTER_HOOK, AuthorizationMetaStoreFilterHook.class.getName()); - authorizerV2.applyAuthorizationConfigPolicy(conf); + authorizerV2.applyAuthorizationConfigPolicy(sessionConf); // update 
config in Hive thread local as well and init the metastore client try { - Hive.get(conf).getMSC(); + Hive.get(sessionConf).getMSC(); } catch (Exception e) { // catch-all due to some exec time dependencies on session state // that would cause ClassNoFoundException otherwise @@ -855,7 +838,7 @@ private void setAuthorizerV2Config() throws HiveException { } // set a marker that this conf has been processed. - conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); + sessionConf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); } public Object getActiveAuthorizer() { @@ -1097,7 +1080,7 @@ static void validateFiles(List newFiles) throws IllegalArgumentException public void reloadAuxJars() throws IOException { final Set reloadedAuxJars = new HashSet(); - final String renewableJarPath = conf.getVar(ConfVars.HIVERELOADABLEJARS); + final String renewableJarPath = sessionConf.getVar(ConfVars.HIVERELOADABLEJARS); // do nothing if this property is not specified or empty if (renewableJarPath == null || renewableJarPath.isEmpty()) { return; @@ -1127,7 +1110,7 @@ public void reloadAuxJars() throws IOException { currentCLoader = (URLClassLoader) Utilities.addToClassPath(currentCLoader, reloadedAuxJars.toArray(new String[0])); - conf.setClassLoader(currentCLoader); + sessionConf.setClassLoader(currentCLoader); Thread.currentThread().setContextClassLoader(currentCLoader); } preReloadableAuxJars.clear(); @@ -1367,21 +1350,6 @@ public void delete_resources(ResourceType t) { } } - public String getCommandType() { - if (commandType == null) { - return null; - } - return commandType.getOperationName(); - } - - public HiveOperation getHiveOperation() { - return commandType; - } - - public void setCommandType(HiveOperation commandType) { - this.commandType = commandType; - } - public HiveAuthorizationProvider getAuthorizer() { setupAuth(); return authorizer; @@ -1472,7 +1440,7 @@ public void setCurrentDatabase(String currentDatabase) { public void close() throws IOException { registry.clear(); if (txnMgr != null) txnMgr.closeTxnManager(); - JavaUtils.closeClassLoadersTo(conf.getClassLoader(), parentLoader); + JavaUtils.closeClassLoadersTo(sessionConf.getClassLoader(), parentLoader); File resourceDir = new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR)); LOG.debug("Removing resource dir " + resourceDir); @@ -1499,7 +1467,7 @@ public void close() throws IOException { try { closeSparkSession(); registry.closeCUDFLoaders(); - dropSessionPaths(conf); + dropSessionPaths(sessionConf); unCacheDataNucleusClassLoaders(); } finally { // removes the threadlocal variables, closes underlying HMS connection @@ -1509,10 +1477,10 @@ public void close() throws IOException { private void unCacheDataNucleusClassLoaders() { try { - Hive threadLocalHive = Hive.get(conf); + Hive threadLocalHive = Hive.get(sessionConf); if ((threadLocalHive != null) && (threadLocalHive.getMSC() != null) && (threadLocalHive.getMSC().isLocalMetaStore())) { - if (conf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL).equals(ObjectStore.class.getName())) { + if (sessionConf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL).equals(ObjectStore.class.getName())) { ObjectStore.unCacheDataNucleusClassLoaders(); } } @@ -1640,9 +1608,9 @@ public void setupQueryCurrentTimestamp() { queryCurrentTimestamp = new Timestamp(System.currentTimeMillis()); // Provide a facility to set current timestamp during tests - if (conf.getBoolVar(ConfVars.HIVE_IN_TEST)) { + if (sessionConf.getBoolVar(ConfVars.HIVE_IN_TEST)) { String 
overrideTimestampString = - HiveConf.getVar(conf, HiveConf.ConfVars.HIVETESTCURRENTTIMESTAMP, (String)null); + HiveConf.getVar(sessionConf, HiveConf.ConfVars.HIVETESTCURRENTTIMESTAMP, (String)null); if (overrideTimestampString != null && overrideTimestampString.length() > 0) { queryCurrentTimestamp = Timestamp.valueOf(overrideTimestampString); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index eaeb66b..667d5c2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.WindowsPathUtil; import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; @@ -71,6 +72,7 @@ */ public class TestExecDriver extends TestCase { + static QueryState queryState; static HiveConf conf; private static final String tmpdir; @@ -82,7 +84,8 @@ static { try { - conf = new HiveConf(ExecDriver.class); + queryState = new QueryState(new HiveConf(ExecDriver.class)); + conf = queryState.getConf(); conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true); conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true); @@ -480,7 +483,7 @@ private void executePlan() throws Exception { MapRedTask mrtask = new MapRedTask(); DriverContext dctx = new DriverContext (); mrtask.setWork(mr); - mrtask.initialize(conf, null, dctx, null); + mrtask.initialize(queryState, null, dctx, null); int exitVal = mrtask.execute(dctx); if (exitVal != 0) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java index 9d7166c..c659806 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.session.SessionState; @@ -36,16 +37,18 @@ private ParseDriver parseDriver; private MacroSemanticAnalyzer analyzer; + private QueryState queryState; private HiveConf conf; private Context context; @Before public void setup() throws Exception { - conf = new HiveConf(); + queryState = new QueryState(null); + conf = queryState.getConf(); SessionState.start(conf); context = new Context(conf); parseDriver = new ParseDriver(); - analyzer = new MacroSemanticAnalyzer(conf); + analyzer = new MacroSemanticAnalyzer(queryState); } private ASTNode parse(String command) throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java index ccdf272..8fe4d02 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java @@ -18,9 +18,11 @@ package org.apache.hadoop.hive.ql.parse; import junit.framework.Assert; + import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; @@ -42,11 +44,13 @@ * Tests for parsing and semantic analysis of ALTER TABLE ... compact. */ public class TestQBCompact { + static QueryState queryState; static HiveConf conf; @BeforeClass public static void init() throws Exception { - conf = new HiveConf(); + queryState = new QueryState(null); + conf = queryState.getConf(); SessionState.start(conf); // Create a table so we can work against it @@ -65,7 +69,7 @@ public static void init() throws Exception { private AlterTableSimpleDesc parseAndAnalyze(String query) throws Exception { ParseDriver hd = new ParseDriver(); ASTNode head = (ASTNode)hd.parse(query).getChild(0); - BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(conf, head); + BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head); a.analyze(head, new Context(conf)); List> roots = a.getRootTasks(); Assert.assertEquals(1, roots.size()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java index 70d86c1..e607f10 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java @@ -23,6 +23,7 @@ import junit.framework.Assert; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.session.SessionState; import org.junit.Before; import org.junit.BeforeClass; @@ -30,19 +31,21 @@ public class TestQBJoinTreeApplyPredicate { + static QueryState queryState; static HiveConf conf; SemanticAnalyzer sA; @BeforeClass public static void initialize() { - conf = new HiveConf(SemanticAnalyzer.class); + queryState = new QueryState(new HiveConf(SemanticAnalyzer.class)); + conf = queryState.getConf(); SessionState.start(conf); } @Before public void setup() throws SemanticException { - sA = new CalcitePlanner(conf); + sA = new CalcitePlanner(queryState); } static ASTNode constructIdentifier(String nm) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java index f6f0abb..f9db2c8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java @@ -22,6 +22,7 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.session.SessionState; import org.junit.Assert; import org.junit.Before; @@ -29,6 +30,7 @@ import org.junit.Test; public class TestQBSubQuery { + static QueryState queryState; static HiveConf conf; private static String IN_QUERY = " select * " + @@ -46,14 +48,15 @@ @BeforeClass public static void initialize() { - conf = new HiveConf(SemanticAnalyzer.class); + queryState = new QueryState(new HiveConf(SemanticAnalyzer.class)); + conf = queryState.getConf(); SessionState.start(conf); } @Before public void setup() throws SemanticException { pd = new ParseDriver(); - sA = new CalcitePlanner(conf); + sA = new CalcitePlanner(queryState); } ASTNode parse(String query) throws ParseException { diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java index 4a52efb..5849950 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java @@ -21,27 +21,30 @@ import org.antlr.runtime.CommonToken; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.QueryState; import org.junit.Before; import org.junit.Test; public class TestSemanticAnalyzerFactory { + private QueryState queryState; private HiveConf conf; @Before public void setup() throws Exception { - conf = new HiveConf(); + queryState = new QueryState(null); + conf = queryState.getConf(); } @Test public void testCreate() throws Exception { BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory. - get(conf, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO))); + get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO))); Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer); } @Test public void testDrop() throws Exception { BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory. - get(conf, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO))); + get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO))); Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer); } } \ No newline at end of file diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java index d795324..70e522c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ExplainTask; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; @@ -47,6 +48,7 @@ static final private Logger LOG = LoggerFactory.getLogger(TestUpdateDeleteSemanticAnalyzer.class.getName()); + private QueryState queryState; private HiveConf conf; private Hive db; @@ -221,7 +223,8 @@ public void testInsertValuesPartitioned() throws Exception { @Before public void setup() { - conf = new HiveConf(); + queryState = new QueryState(null); + conf = queryState.getConf(); conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"); @@ -258,7 +261,7 @@ private ReturnInfo parseAndAnalyze(String query, String testName) ASTNode tree = pd.parse(query, ctx); tree = ParseUtils.findRootNonNullToken(tree); - BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree); + BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree); SessionState.get().initTxnMgr(conf); db = sem.getDb(); @@ -297,7 +300,7 @@ private String explain(SemanticAnalyzer sem, QueryPlan plan, ASTNode astTree) th sem.getFetchTask(), astTree, sem, true, false, false, false, false, false, null); ExplainTask task = new ExplainTask(); task.setWork(work); - 
task.initialize(conf, plan, null, null); + task.initialize(queryState, plan, null, null); task.execute(null); FSDataInputStream in = fs.open(tmp); StringBuilder builder = new StringBuilder(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java index 2e85a2a..e8e29ee 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java @@ -22,8 +22,8 @@ import junit.framework.Assert; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.parse.ASTNode; @@ -46,10 +46,10 @@ * @return * @throws Exception */ - public static DDLWork analyze(ASTNode ast, HiveConf conf, Hive db) throws Exception { - DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(conf, db); - SessionState.start(conf); - analyzer.analyze(ast, new Context(conf)); + public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception { + DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db); + SessionState.start(queryState.getConf()); + analyzer.analyze(ast, new Context(queryState.getConf())); List> rootTasks = analyzer.getRootTasks(); return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork(); } @@ -62,8 +62,8 @@ public static DDLWork analyze(ASTNode ast, HiveConf conf, Hive db) throws Except * @return * @throws Exception */ - public static DDLWork analyze(String command, HiveConf conf, Hive db) throws Exception { - return analyze(parse(command), conf, db); + public static DDLWork analyze(String command, QueryState queryState, Hive db) throws Exception { + return analyze(parse(command), queryState, db); } private static ASTNode parse(String command) throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java index e3b82f3..b324dfd 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.GrantDesc; @@ -34,9 +35,9 @@ protected static final String TABLE_QNAME = DB + "." 
+ TABLE;
 
   protected static final String USER = "user1";
 
-  public static void grantUserTable(String privStr, PrivilegeType privType, HiveConf conf, Hive db)
+  public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db)
       throws Exception {
-    DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, conf, db);
+    DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db);
     GrantDesc grantDesc = work.getGrantDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 038e5fd..bfacbd0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -19,12 +19,13 @@ import java.util.HashMap;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -89,7 +90,7 @@ public static void reset() {
   private ParseDriver parseDriver;
   private DDLSemanticAnalyzer analyzer;
-  private HiveConf conf;
+  private QueryState queryState;
   private Context context;
   private String currentUser;
   private Hive db;
@@ -98,7 +99,8 @@ public static void reset() {
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
+    HiveConf conf = queryState.getConf();
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
         TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
     db = Mockito.mock(Hive.class);
@@ -107,7 +109,7 @@ public void setup() throws Exception {
     SessionState.start(conf);
     context = new Context(conf);
     parseDriver = new ParseDriver();
-    analyzer = new DDLSemanticAnalyzer(conf, db);
+    analyzer = new DDLSemanticAnalyzer(queryState, db);
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap(), false))
@@ -478,7 +480,7 @@ public void testGrantServer() throws Exception {
   }
 
   private DDLWork analyze(String command) throws Exception {
-    return AuthorizationTestUtil.analyze(command, conf, db);
+    return AuthorizationTestUtil.analyze(command, queryState, db);
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index c614630..41584af 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -22,6 +22,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -33,18 +34,18 @@ public class TestPrivilegesV1 extends PrivilegesTestBase{
 
-  private HiveConf conf;
+  private QueryState queryState;
   private Hive db;
   private Table table;
   private Partition partition;
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
     db = Mockito.mock(Hive.class);
     table = new Table(DB, TABLE);
     partition = new Partition(table);
-    SessionState.start(conf);
+    SessionState.start(queryState.getConf());
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap(), false))
@@ -81,6 +82,6 @@ public void testPrivInGrantNotAccepted() throws Exception{
   }
 
   private void grantUserTable(String privName, PrivilegeType privType) throws Exception {
-    grantUserTable(privName, privType, conf, db);
+    grantUserTable(privName, privType, queryState, db);
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
index 7b28375..c552ba7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
@@ -20,6 +20,7 @@ import java.util.HashMap;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -32,15 +33,16 @@ public class TestPrivilegesV2 extends PrivilegesTestBase{
 
-  private HiveConf conf;
+  private QueryState queryState;
   private Hive db;
   private Table table;
   private Partition partition;
 
   @Before
   public void setup() throws Exception {
-    conf = new HiveConf();
+    queryState = new QueryState(null);
     //set authorization mode to V2
+    HiveConf conf = queryState.getConf();
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         SQLStdHiveAuthorizerFactory.class.getName());
     db = Mockito.mock(Hive.class);
@@ -67,7 +69,7 @@ public void testPrivInGrant() throws Exception{
   }
 
   private void grantUserTable(String privName, PrivilegeType privType) throws Exception {
-    grantUserTable(privName, privType, conf, db);
+    grantUserTable(privName, privType, queryState, db);
   }
 }
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index d9a273b..4b3747d 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.OperationLog;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -61,7 +61,6 @@
   private volatile OperationState state = OperationState.INITIALIZED;
   private volatile MetricsScope currentStateScope;
   private final OperationHandle opHandle;
-  private HiveConf configuration;
   public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT;
   public static final Logger LOG = LoggerFactory.getLogger(Operation.class.getName());
   public static final long DEFAULT_FETCH_MAX_ROWS = 100;
@@ -80,13 +79,13 @@
   protected long operationStart;
   protected long operationComplete;
 
+  protected final QueryState queryState;
+
   protected static final EnumSet<FetchOrientation> DEFAULT_FETCH_ORIENTATION_SET =
       EnumSet.of(FetchOrientation.FETCH_NEXT,FetchOrientation.FETCH_FIRST);
 
   protected Operation(HiveSession parentSession, OperationType opType, boolean runInBackground) {
     this(parentSession, null, opType, runInBackground);
-    // Generate a queryId for the operation if no queryId is provided
-    confOverlay.put(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
   }
 
   protected Operation(HiveSession parentSession, Map<String, String> confOverlay,
       OperationType opType, boolean runInBackground) {
@@ -101,6 +100,11 @@ protected Operation(HiveSession parentSession, Map<String, String> confOverlay,
     operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(),
         HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
     setMetrics(state);
+    queryState = new QueryState(parentSession.getHiveConf(), confOverlay, runAsync);
+  }
+
+  public QueryState getQueryState() {
+    return queryState;
   }
 
   public Future<?> getBackgroundHandle() {
@@ -115,13 +119,6 @@ public boolean shouldRunAsync() {
     return runAsync;
   }
 
-  public void setConfiguration(HiveConf configuration) {
-    this.configuration = new HiveConf(configuration);
-  }
-
-  public HiveConf getConfiguration() {
-    return new HiveConf(configuration);
-  }
 
   public HiveSession getParentSession() {
     return parentSession;
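Note: the constructor change above makes Operation the single place where a per-query configuration is assembled. The sketch below is illustrative only; it approximates what the new QueryState(parentConf, confOverlay, runAsync) constructor is expected to cover, reconstructed from the overlay/clone logic and query-id generation this patch removes elsewhere. The class name, field names, and exact cloning rules are assumptions, not the committed implementation.

```java
// Rough sketch, not the real org.apache.hadoop.hive.ql.QueryState.
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryPlan;

public class QueryStateSketch {
  private final HiveConf queryConf;

  public QueryStateSketch(HiveConf sessionConf, Map<String, String> confOverlay, boolean runAsync) {
    // Isolate the query from later session-level changes when running async, or when
    // statement-specific settings are supplied (the same two cases the removed
    // SQLOperation.getConfigForOperation() handled).
    if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) {
      queryConf = new HiveConf(sessionConf);
    } else {
      queryConf = sessionConf;
    }
    // Apply statement-scoped overrides, if any.
    if (confOverlay != null) {
      for (Map.Entry<String, String> entry : confOverlay.entrySet()) {
        queryConf.verifyAndSet(entry.getKey(), entry.getValue());
      }
    }
    // Generate the query id once, instead of each caller putting HIVEQUERYID into the
    // conf or the overlay (the lines removed in this patch).
    if (queryConf.getVar(HiveConf.ConfVars.HIVEQUERYID).isEmpty()) {
      queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
    }
  }

  public HiveConf getConf() {
    return queryConf;
  }
}
```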
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 04d816a..efa8e16 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryDisplay;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -123,10 +124,10 @@ private void setupSessionIO(SessionState sessionState) {
    * @param sqlOperationConf
    * @throws HiveSQLException
    */
-  public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
+  public void prepare(QueryState queryState) throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
-      driver = new Driver(sqlOperationConf, getParentSession().getUserName());
+      driver = new Driver(queryState, getParentSession().getUserName());
       sqlOpDisplay.setQueryDisplay(driver.getQueryDisplay());
 
       // set the operation handle information in Driver, so that thrift API users
@@ -179,7 +180,7 @@ public void prepare(HiveConf sqlOperationConf) throws HiveSQLException {
     }
   }
 
-  private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException {
+  private void runQuery() throws HiveSQLException {
     try {
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
@@ -211,11 +212,10 @@ private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException {
   @Override
   public void runInternal() throws HiveSQLException {
     setState(OperationState.PENDING);
-    final HiveConf opConfig = getConfigForOperation();
-    prepare(opConfig);
+    prepare(queryState);
     if (!shouldRunAsync()) {
-      runQuery(opConfig);
+      runQuery();
     } else {
       // We'll pass ThreadLocals in the background thread from the foreground (handler) thread
       final SessionState parentSessionState = SessionState.get();
@@ -224,7 +224,7 @@ public void runInternal() throws HiveSQLException {
       final Hive parentHive = parentSession.getSessionHive();
       // Current UGI will get used by metastore when metsatore is in embedded mode
      // So this needs to get passed to the new background thread
-      final UserGroupInformation currentUGI = getCurrentUGI(opConfig);
+      final UserGroupInformation currentUGI = getCurrentUGI();
      // Runnable impl to call runInternal asynchronously,
      // from a different thread
      Runnable backgroundOperation = new Runnable() {
@@ -239,7 +239,7 @@ public Object run() throws HiveSQLException {
             registerCurrentOperationLog();
             registerLoggingContext();
             try {
-              runQuery(opConfig);
+              runQuery();
             } catch (HiveSQLException e) {
               setOperationException(e);
               LOG.error("Error running hive query: ", e);
@@ -290,7 +290,7 @@ public Object run() throws HiveSQLException {
    * @return UserGroupInformation
    * @throws HiveSQLException
    */
-  private UserGroupInformation getCurrentUGI(HiveConf opConfig) throws HiveSQLException {
+  private UserGroupInformation getCurrentUGI() throws HiveSQLException {
     try {
       return Utils.getUGI();
     } catch (Exception e) {
@@ -500,34 +500,6 @@ private SerDe getSerDe() throws SQLException {
   }
 
   /**
-   * If there are query specific settings to overlay, then create a copy of config
-   * There are two cases we need to clone the session config that's being passed to hive driver
-   * 1. Async query -
-   *   If the client changes a config setting, that shouldn't reflect in the execution already underway
-   * 2. confOverlay -
-   *   The query specific settings should only be applied to the query config and not session
-   * @return new configuration
-   * @throws HiveSQLException
-   */
-  public HiveConf getConfigForOperation() throws HiveSQLException {
-    HiveConf sqlOperationConf = getParentSession().getHiveConf();
-    if (!confOverlay.isEmpty() || shouldRunAsync()) {
-      // clone the partent session config for this query
-      sqlOperationConf = new HiveConf(sqlOperationConf);
-
-      // apply overlay query specific settings, if any
-      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
-        try {
-          sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
-        } catch (IllegalArgumentException e) {
-          throw new HiveSQLException("Error applying statement specific settings", e);
-        }
-      }
-    }
-    return sqlOperationConf;
-  }
-
-  /**
    * Get summary information of this SQLOperation for display in WebUI.
    */
   public SQLOperationDisplay getSQLOperationDisplay() {
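With getConfigForOperation() gone, anything that needs statement-level configuration goes through the operation's QueryState instead, which is exactly what the SQLOperationDisplay change below does. A hypothetical helper, purely for illustration (the method itself is made up; the accessors it calls are the ones this patch introduces or keeps):

```java
// Hypothetical helper; not part of the patch.
static String describeOperation(SQLOperation op) {
  // Per-query conf; the confOverlay has already been applied by QueryState.
  HiveConf queryConf = op.getQueryState().getConf();
  String engine = queryConf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
  return op.getHandle().getHandleIdentifier() + " [" + engine + "]";
}
```

The point of the design is that the copy-and-overlay work happens once, in the Operation constructor, rather than being recomputed by every caller of the old getConfigForOperation().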
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
index d2ca1e7..2406025 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperationDisplay.java
@@ -40,7 +40,7 @@ public SQLOperationDisplay(SQLOperation sqlOperation) throws HiveSQLException {
     this.state = sqlOperation.getState();
     this.userName = sqlOperation.getParentSession().getUserName();
-    this.executionEngine = sqlOperation.getConfigForOperation().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
+    this.executionEngine = sqlOperation.getQueryState().getConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
     this.beginTime = System.currentTimeMillis();
     this.operationId = sqlOperation.getHandle().getHandleIdentifier().toString();
   }
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 80a1844..97898a5 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -84,7 +84,7 @@
   private final SessionHandle sessionHandle;
   private String username;
   private final String password;
-  private final HiveConf hiveConf;
+  private final HiveConf sessionConf;
   private final long creationTime;
   // TODO: some SessionState internals are not thread safe. The compile-time internals are synced
   // via session-scope or global compile lock. The run-time internals work by magic!
@@ -118,25 +118,25 @@ public HiveSessionImpl(SessionHandle sessionHandle, TProtocolVersion protocol, S
     this.password = password;
     creationTime = System.currentTimeMillis();
     this.sessionHandle = sessionHandle != null ? sessionHandle : new SessionHandle(protocol);
-    this.hiveConf = new HiveConf(serverhiveConf);
+    this.sessionConf = new HiveConf(serverhiveConf);
     this.ipAddress = ipAddress;
     try {
       // In non-impersonation mode, map scheduler queue to current user
       // if fair scheduler is configured.
-      if (! hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) &&
-          hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_MAP_FAIR_SCHEDULER_QUEUE)) {
-        ShimLoader.getHadoopShims().refreshDefaultQueue(hiveConf, username);
+      if (! sessionConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) &&
+          sessionConf.getBoolVar(ConfVars.HIVE_SERVER2_MAP_FAIR_SCHEDULER_QUEUE)) {
+        ShimLoader.getHadoopShims().refreshDefaultQueue(sessionConf, username);
       }
     } catch (IOException e) {
       LOG.warn("Error setting scheduler queue: " + e, e);
     }
     // Set an explicit session name to control the download directory name
-    hiveConf.set(ConfVars.HIVESESSIONID.varname,
+    sessionConf.set(ConfVars.HIVESESSIONID.varname,
         this.sessionHandle.getHandleIdentifier().toString());
     // Use thrift transportable formatter
-    hiveConf.set(ListSinkOperator.OUTPUT_FORMATTER,
+    sessionConf.set(ListSinkOperator.OUTPUT_FORMATTER,
         FetchFormatter.ThriftFormatter.class.getName());
-    hiveConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue());
+    sessionConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue());
   }
 
   public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
@@ -156,7 +156,7 @@ public HiveSessionImpl(TProtocolVersion protocol, String username, String passwo
    * That's why it is important to create SessionState here rather than in the constructor.
    */
   public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
-    sessionState = new SessionState(hiveConf, username);
+    sessionState = new SessionState(sessionConf, username);
     sessionState.setUserIpAddress(ipAddress);
     sessionState.setIsHiveServerQuery(true);
     SessionState.start(sessionState);
@@ -221,7 +221,7 @@ private void processGlobalInitFile() {
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
+      String hiverc = sessionConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
       if (hiverc != null) {
         File hivercFile = new File(hiverc);
         if (hivercFile.isDirectory()) {
@@ -255,7 +255,7 @@ private void configureSession(Map<String, String> sessionConfMap) throws HiveSQL
       } else if (key.startsWith("use:")) {
         SessionState.get().setCurrentDatabase(entry.getValue());
       } else {
-        hiveConf.verifyAndSet(key, entry.getValue());
+        sessionConf.verifyAndSet(key, entry.getValue());
       }
     }
   }
@@ -376,8 +376,8 @@ public String getPassword() {
 
   @Override
   public HiveConf getHiveConf() {
-    hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
-    return hiveConf;
+    sessionConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE, FETCH_WORK_SERDE_CLASS);
+    return sessionConf;
   }
 
   @Override
@@ -438,16 +438,6 @@ private OperationHandle executeStatementInternal(String statement, Map<String, String> confOverlay,
     acquire(true);
 
-    if (confOverlay == null) {
-      confOverlay = new HashMap<String, String>();
-    } else {
-      Map<String, String> conf = new HashMap<String, String>();
-      conf.putAll(confOverlay);
-      confOverlay = conf;
-    }
-    confOverlay.put(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
-
     OperationManager operationManager = getOperationManager();
     ExecuteStatementOperation operation = operationManager
         .newExecuteStatementOperation(getSession(), statement, confOverlay, runAsync);
@@ -574,7 +564,7 @@ public OperationHandle getTableTypes()
   public OperationHandle getColumns(String catalogName, String schemaName,
       String tableName, String columnName) throws HiveSQLException {
     acquire(true);
-    String addedJars = Utilities.getResourceFiles(hiveConf, SessionState.ResourceType.JAR);
+    String addedJars = Utilities.getResourceFiles(sessionConf, SessionState.ResourceType.JAR);
     if (StringUtils.isNotBlank(addedJars)) {
       IMetaStoreClient metastoreClient = getSession().getMetaStoreClient();
       metastoreClient.setHiveAddedJars(addedJars);
@@ -784,7 +774,7 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio
       if (fetchType == FetchType.QUERY_OUTPUT) {
         return operationManager.getOperationNextRowSet(opHandle, orientation, maxRows);
       }
-      return operationManager.getOperationLogRowSet(opHandle, orientation, maxRows, hiveConf);
+      return operationManager.getOperationLogRowSet(opHandle, orientation, maxRows, sessionConf);
     } finally {
       release(true);
     }
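Taken together with the Operation changes above, the intended split after this patch looks roughly like the sketch below. It is illustrative only: the method is invented, the constructor and accessors are the ones used in the diff, and the assumption that QueryState now generates the query id is inferred from the removals in this patch rather than shown here.

```java
// Illustrative only -- not code from this patch.
QueryState startStatement(HiveConf sessionConf, Map<String, String> confOverlay, boolean runAsync) {
  // The session no longer injects HIVEQUERYID into confOverlay; it just hands the
  // overlay to the operation, whose QueryState owns the per-query HiveConf.
  QueryState queryState = new QueryState(sessionConf, confOverlay, runAsync);
  // sessionConf keeps serving the session; queryState.getConf() serves this statement only,
  // so async queries are isolated from later session-level configuration changes.
  return queryState;
}
```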