diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
index 6c60125..155c65d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
@@ -19,11 +19,11 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryDisplay;
+import org.apache.hadoop.hive.ql.QueryInfo;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.cli.OperationHandle;
 import org.apache.hive.service.cli.SessionHandle;
-import org.apache.hive.service.cli.operation.SQLOperationDisplay;
 import org.apache.hive.service.rpc.thrift.TProtocolVersion;
 import org.apache.hive.service.server.HiveServer2;
 import org.apache.hive.tmpl.QueryProfileTmpl;
@@ -69,25 +69,25 @@ public void testQueryDisplay() throws Exception {
     OperationHandle opHandle2 = session.executeStatement("show tables", null);
 
-    List<SQLOperationDisplay> liveSqlOperations, historicSqlOperations;
-    liveSqlOperations = sessionManager.getOperationManager().getLiveSqlOperations();
-    historicSqlOperations = sessionManager.getOperationManager().getHistoricalSQLOperations();
+    List<QueryInfo> liveSqlOperations, historicSqlOperations;
+    liveSqlOperations = sessionManager.getOperationManager().getLiveQueryInfos();
+    historicSqlOperations = sessionManager.getOperationManager().getHistoricalQueryInfos();
     Assert.assertEquals(liveSqlOperations.size(), 2);
     Assert.assertEquals(historicSqlOperations.size(), 0);
     verifyDDL(liveSqlOperations.get(0), "show databases", opHandle1.getHandleIdentifier().toString(), false);
     verifyDDL(liveSqlOperations.get(1),"show tables", opHandle2.getHandleIdentifier().toString(), false);
 
     session.closeOperation(opHandle1);
-    liveSqlOperations = sessionManager.getOperationManager().getLiveSqlOperations();
-    historicSqlOperations = sessionManager.getOperationManager().getHistoricalSQLOperations();
+    liveSqlOperations = sessionManager.getOperationManager().getLiveQueryInfos();
+    historicSqlOperations = sessionManager.getOperationManager().getHistoricalQueryInfos();
     Assert.assertEquals(liveSqlOperations.size(), 1);
     Assert.assertEquals(historicSqlOperations.size(), 1);
     verifyDDL(historicSqlOperations.get(0),"show databases", opHandle1.getHandleIdentifier().toString(), true);
     verifyDDL(liveSqlOperations.get(0),"show tables", opHandle2.getHandleIdentifier().toString(), false);
 
     session.closeOperation(opHandle2);
-    liveSqlOperations = sessionManager.getOperationManager().getLiveSqlOperations();
-    historicSqlOperations = sessionManager.getOperationManager().getHistoricalSQLOperations();
+    liveSqlOperations = sessionManager.getOperationManager().getLiveQueryInfos();
+    historicSqlOperations = sessionManager.getOperationManager().getHistoricalQueryInfos();
     Assert.assertEquals(liveSqlOperations.size(), 0);
     Assert.assertEquals(historicSqlOperations.size(), 2);
     verifyDDL(historicSqlOperations.get(1),"show databases", opHandle1.getHandleIdentifier().toString(), true);
@@ -123,23 +123,23 @@ public void testWebUI() throws Exception {
     session.close();
   }
 
-  private void verifyDDL(SQLOperationDisplay display, String stmt, String handle, boolean finished) {
+  private void verifyDDL(QueryInfo queryInfo, String stmt, String handle, boolean finished) {
-    Assert.assertEquals(display.getUserName(), "testuser");
-    Assert.assertEquals(display.getExecutionEngine(), "mr");
-    Assert.assertEquals(display.getOperationId(), handle);
-    Assert.assertTrue(display.getBeginTime() > 0 && display.getBeginTime() <= System.currentTimeMillis());
+    Assert.assertEquals(queryInfo.getUserName(), "testuser");
+    Assert.assertEquals(queryInfo.getExecutionEngine(), "mr");
+    Assert.assertEquals(queryInfo.getOperationId(), handle);
+    Assert.assertTrue(queryInfo.getBeginTime() > 0 && queryInfo.getBeginTime() <= System.currentTimeMillis());
     if (finished) {
-      Assert.assertTrue(display.getEndTime() > 0 && display.getEndTime() >= display.getBeginTime()
-          && display.getEndTime() <= System.currentTimeMillis());
-      Assert.assertTrue(display.getRuntime() > 0);
+      Assert.assertTrue(queryInfo.getEndTime() > 0 && queryInfo.getEndTime() >= queryInfo.getBeginTime()
+          && queryInfo.getEndTime() <= System.currentTimeMillis());
+      Assert.assertTrue(queryInfo.getRuntime() > 0);
     } else {
-      Assert.assertNull(display.getEndTime());
+      Assert.assertNull(queryInfo.getEndTime());
       //For runtime, query may have finished.
     }
 
-    QueryDisplay qDisplay1 = display.getQueryDisplay();
+    QueryDisplay qDisplay1 = queryInfo.getQueryDisplay();
     Assert.assertNotNull(qDisplay1);
     Assert.assertEquals(qDisplay1.getQueryString(), stmt);
     Assert.assertNotNull(qDisplay1.getExplainPlan());
@@ -170,9 +170,9 @@ private void verifyDDL(SQLOperationDisplay display, String stmt, String handle,
    */
   private void verifyDDLHtml(String stmt, String opHandle) throws Exception {
     StringWriter sw = new StringWriter();
-    SQLOperationDisplay sod = sessionManager.getOperationManager().getSQLOperationDisplay(
+    QueryInfo queryInfo = sessionManager.getOperationManager().getQueryInfo(
       opHandle);
-    new QueryProfileTmpl().render(sw, sod);
+    new QueryProfileTmpl().render(sw, queryInfo);
     String html = sw.toString();
     Assert.assertTrue(html.contains(stmt));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index a800046..6bff2e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -95,7 +95,6 @@
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
 import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
@@ -113,8 +112,6 @@
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
-import org.apache.hadoop.hive.ql.session.OperationLog;
-import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.ByteStream;
@@ -137,6 +134,7 @@
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
   static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private final QueryInfo queryInfo;
   private Runnable shutdownRunner = null;
 
   private int maxRows = 100;
@@ -331,28 +329,33 @@ public void setMaxRows(int maxRows) {
   public Driver() {
     this(new QueryState((SessionState.get() != null) ?
-        SessionState.get().getConf() : new HiveConf()), null);
+        SessionState.get().getConf() : new HiveConf()), null, null);
   }
 
   public Driver(HiveConf conf) {
-    this(new QueryState(conf), null);
+    this(new QueryState(conf), null, null);
   }
 
   public Driver(HiveConf conf, Context ctx) {
-    this(new QueryState(conf), null);
+    this(new QueryState(conf), null, null);
     this.ctx = ctx;
   }
 
   public Driver(HiveConf conf, String userName) {
-    this(new QueryState(conf), userName);
+    this(new QueryState(conf), userName, null);
  }
 
   public Driver(QueryState queryState, String userName) {
+    this(queryState, userName, null);
+  }
+
+  public Driver(QueryState queryState, String userName, QueryInfo queryInfo) {
     this.queryState = queryState;
     this.conf = queryState.getConf();
     isParallelEnabled = (conf != null) && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION);
     this.userName = userName;
+    this.queryInfo = queryInfo;
   }
 
   /**
@@ -1743,7 +1746,7 @@ public int execute(boolean deferClose) throws CommandNeedRetryException {
       hookContext = new HookContext(plan, queryState, ctx.getPathToCS(), ss.getUserFromAuthenticator(),
           ss.getUserIpAddress(), InetAddress.getLocalHost().getHostAddress(), operationId,
-          ss.getSessionId(), Thread.currentThread().getName(), ss.isHiveServerQuery(), perfLogger);
+          ss.getSessionId(), Thread.currentThread().getName(), ss.isHiveServerQuery(), perfLogger, queryInfo);
       hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);
 
       for (Hook peh : getHooks(HiveConf.ConfVars.PREEXECHOOKS)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
new file mode 100644
index 0000000..adb72a7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql;
+
+/**
+ * The class is synchronized, as WebUI may access information about a running query.
+ */
+public class QueryInfo {
+
+  private final String userName;
+  private final String executionEngine;
+  private final long beginTime;
+  private final String operationId;
+  private Long runtime;  // tracks only running portion of the query.
+
+  private Long endTime;
+  private String state;
+  private QueryDisplay queryDisplay;
+
+  public QueryInfo(String state, String userName, String executionEngine, String operationId) {
+    this.state = state;
+    this.userName = userName;
+    this.executionEngine = executionEngine;
+    this.beginTime = System.currentTimeMillis();
+    this.operationId = operationId;
+  }
+
+  public synchronized long getElapsedTime() {
+    if (isRunning()) {
+      return System.currentTimeMillis() - beginTime;
+    } else {
+      return endTime - beginTime;
+    }
+  }
+
+  public synchronized boolean isRunning() {
+    return endTime == null;
+  }
+
+  public synchronized QueryDisplay getQueryDisplay() {
+    return queryDisplay;
+  }
+
+  public synchronized void setQueryDisplay(QueryDisplay queryDisplay) {
+    this.queryDisplay = queryDisplay;
+  }
+
+  public String getUserName() {
+    return userName;
+  }
+
+  public String getExecutionEngine() {
+    return executionEngine;
+  }
+
+  public synchronized String getState() {
+    return state;
+  }
+
+  public long getBeginTime() {
+    return beginTime;
+  }
+
+  public synchronized Long getEndTime() {
+    return endTime;
+  }
+
+  public synchronized void updateState(String state) {
+    this.state = state;
+  }
+
+  public String getOperationId() {
+    return operationId;
+  }
+
+  public synchronized void setEndTime() {
+    this.endTime = System.currentTimeMillis();
+  }
+
+  public synchronized void setRuntime(long runtime) {
+    this.runtime = runtime;
+  }
+
+  public synchronized Long getRuntime() {
+    return runtime;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
index 359c238..97ad3c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
@@ -26,9 +26,11 @@
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryInfo;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
+import org.apache.hadoop.hive.ql.history.HiveHistory;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -68,11 +70,12 @@
   private final String threadId;
   private final boolean isHiveServerQuery;
   private final PerfLogger perfLogger;
+  private final QueryInfo queryInfo;
 
   public HookContext(QueryPlan queryPlan, QueryState queryState,
       Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress,
       String hiveInstanceAddress, String operationId, String sessionId, String threadId,
-      boolean isHiveServerQuery, PerfLogger perfLogger) throws Exception {
+      boolean isHiveServerQuery, PerfLogger perfLogger, QueryInfo queryInfo) throws Exception {
     this.queryPlan = queryPlan;
     this.queryState = queryState;
     this.conf = queryState.getConf();
@@ -95,6 +98,7 @@
     this.threadId = threadId;
     this.isHiveServerQuery = isHiveServerQuery;
     this.perfLogger = perfLogger;
+    this.queryInfo = queryInfo;
   }
 
   public QueryPlan getQueryPlan() {
@@ -232,4 +236,8 @@ public boolean isHiveServerQuery() {
   public PerfLogger getPerfLogger() {
     return perfLogger;
   }
+
+  public QueryInfo getQueryInfo() {
+    return queryInfo;
+  }
 }
diff --git a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
index 07aa3c1..7d454c7 100644
--- a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
+++ b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
@@ -17,12 +17,12 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 </%doc>
 <%args>
-SQLOperationDisplay sod;
+QueryInfo queryInfo;
 </%args>
 <%import>
 java.util.*;
 org.apache.hadoop.hive.ql.QueryDisplay;
-org.apache.hive.service.cli.operation.SQLOperationDisplay;
+org.apache.hadoop.hive.ql.QueryInfo;
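
For reviewers, a minimal self-contained sketch of the QueryInfo lifecycle the new class supports, using only the methods added in QueryInfo.java above. The state strings ("INITIALIZED", "RUNNING", "FINISHED"), the operation id value, and the standalone main() harness are illustrative placeholders rather than anything mandated by this patch; in HiveServer2 the state comes from the operation's own state machine.

import org.apache.hadoop.hive.ql.QueryDisplay;
import org.apache.hadoop.hive.ql.QueryInfo;

public class QueryInfoLifecycleSketch {

  public static void main(String[] args) throws InterruptedException {
    // beginTime is captured inside the constructor; the operation id would normally
    // come from the HiveServer2 operation handle (placeholder value here).
    QueryInfo info = new QueryInfo("INITIALIZED", "testuser", "mr", "fake-operation-id");

    // The owning operation attaches its QueryDisplay so the WebUI can render the plan.
    info.setQueryDisplay(new QueryDisplay());

    info.updateState("RUNNING");
    Thread.sleep(50);

    // While endTime is unset, isRunning() is true and getElapsedTime() keeps growing.
    System.out.println("running=" + info.isRunning() + ", elapsedMs=" + info.getElapsedTime());

    // On completion the caller records the run time and stamps the end time;
    // from then on getElapsedTime() is frozen at endTime - beginTime.
    info.setRuntime(info.getElapsedTime());
    info.setEndTime();
    info.updateState("FINISHED");

    System.out.println(info.getUserName() + " on " + info.getExecutionEngine()
        + ": state=" + info.getState() + ", runtimeMs=" + info.getRuntime());
  }
}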
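The HookContext change only threads the object through the constructor and exposes a getter, so an execution hook can observe the running query's metadata. A hedged sketch of such a consumer follows; the hook class name is hypothetical, and the null check reflects that only callers which pass a QueryInfo into the new Driver constructor (HiveServer2 SQL operations) will see a non-null value, while the plain CLI constructors above pass null.

import org.apache.hadoop.hive.ql.QueryInfo;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;

/**
 * Hypothetical post-execution hook illustrating the new HookContext.getQueryInfo() accessor.
 */
public class QueryInfoLoggingHook implements ExecuteWithHookContext {

  @Override
  public void run(HookContext hookContext) throws Exception {
    QueryInfo queryInfo = hookContext.getQueryInfo();
    // Plain CLI Driver instances pass null here, so guard before dereferencing.
    if (queryInfo == null) {
      return;
    }
    System.out.println("operation " + queryInfo.getOperationId()
        + " state=" + queryInfo.getState()
        + " user=" + queryInfo.getUserName()
        + " elapsedMs=" + queryInfo.getElapsedTime());
  }
}

Such a hook would be registered via hive.exec.post.hooks in the usual way; nothing in this patch changes hook registration.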