diff --git common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java index ba2267b..0f082f6 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java @@ -225,6 +225,12 @@ public void addGauge(String name, MetricsVariable variable) { //Not implemented. } + @Override + public void addRatio(String name, MetricsVariable numerator, + MetricsVariable denominator) { + //Not implemented + } + public void set(String name, Object value) { metrics.put(name,value); } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java index 9b263d9..8fb7c5a 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java @@ -93,4 +93,14 @@ * @param variable variable to track. 
*/ public void addGauge(String name, final MetricsVariable variable); + + /** + * Add a ratio metric to track the correlation between two variables + * @param name name of the ratio gauge + * @param numerator numerator of the ratio + * @param denominator denominator of the ratio + */ + public void addRatio(String name, MetricsVariable numerator, + MetricsVariable denominator); + } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java index c9d4087..b4a7dcc 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java @@ -67,5 +67,10 @@ public static final String HIVE_SPARK_TASKS = "hive_spark_tasks"; // The number of tez tasks executed by the HiveServer2 since the last restart public static final String HIVE_TEZ_TASKS = "hive_tez_tasks"; + public static final String HS2_OPEN_SESSIONS = "hs2_open_sessions"; + public static final String HS2_ACTIVE_SESSIONS = "hs2_active_sessions"; + public static final String HS2_ABANDONED_SESSIONS = "hs2_abandoned_sessions"; + public static final String HS2_AVG_OPEN_SESSION_TIME = "hs2_avg_open_session_time"; + public static final String HS2_AVG_ACTIVE_SESSION_TIME = "hs2_avg_active_session_time"; } \ No newline at end of file diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java index 9525b45..cd3d627 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java @@ -36,6 +36,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.github.joshelser.dropwizard.metrics.hadoop.HadoopMetrics2Reporter; import 
com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; @@ -292,13 +293,27 @@ public Object getValue() { return variable.getValue(); } }; + addGaugeInternal(name, gauge); + } + + @Override + public void addRatio(String name, MetricsVariable numerator, + MetricsVariable denominator) { + Preconditions.checkArgument(numerator != null, "Numerator must not be null"); + Preconditions.checkArgument(denominator != null, "Denominator must not be null"); + + MetricVariableRatioGauge gauge = new MetricVariableRatioGauge(numerator, denominator); + addGaugeInternal(name, gauge); + } + + private void addGaugeInternal(String name, Gauge gauge) { try { gaugesLock.lock(); gauges.put(name, gauge); // Metrics throws an Exception if we don't do this when the key already exists if (metricRegistry.getGauges().containsKey(name)) { LOGGER.warn("A Gauge with name [" + name + "] already exists. " - + " The old gauge will be overwritten, but this is not recommended"); + + " The old gauge will be overwritten, but this is not recommended"); metricRegistry.remove(name); } metricRegistry.register(name, gauge); diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/MetricVariableRatioGauge.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/MetricVariableRatioGauge.java new file mode 100644 index 0000000..3de5dd1 --- /dev/null +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/MetricVariableRatioGauge.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.RatioGauge; +import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; + +/** + * Combines two numeric metric variables into one gauge type metric displaying their ratio + */ +public class MetricVariableRatioGauge extends RatioGauge { + + private final MetricsVariable numerator; + private final MetricsVariable denominator; + + public MetricVariableRatioGauge(MetricsVariable numerator, + MetricsVariable denominator) { + this.numerator = numerator; + this.denominator = denominator; + } + + @Override + protected Ratio getRatio() { + Integer numValue = numerator.getValue(); + Integer denomValue = denominator.getValue(); + if(numValue != null && denomValue != null) { + return Ratio.of(numValue.doubleValue(), denomValue.doubleValue()); + } + return Ratio.of(0d,0d); + } +} diff --git common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java index 4667658..3bb7a1e 100644 --- common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java +++ common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java @@ -23,7 +23,6 @@ import java.io.File; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; /** @@ -50,6 +49,12 @@ public static void 
verifyMetricsJson(String json, MetricsCategory category, Stri Assert.assertEquals(expectedValue.toString(), jsonNode.asText()); } + public static void verifyMetricsJson(String json, MetricsCategory category, String metricsName, + Double expectedValue, Double delta) throws Exception { + JsonNode jsonNode = getJsonNode(json, category, metricsName); + Assert.assertEquals(expectedValue, Double.valueOf(jsonNode.asText()), delta); + } + public static JsonNode getJsonNode(String json, MetricsCategory category, String metricsName) throws Exception { ObjectMapper objectMapper = new ObjectMapper(); JsonNode rootNode = objectMapper.readTree(json); diff --git common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java new file mode 100644 index 0000000..eb92e65 --- /dev/null +++ common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java @@ -0,0 +1,115 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.MetricRegistry; +import org.apache.hadoop.hive.common.metrics.MetricsTestUtils; +import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; +import org.apache.hadoop.hive.conf.HiveConf; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit test for the RatioGauge implementation. + */ +public class TestMetricVariableRatioGauge { + + public static MetricRegistry metricRegistry; + + @Before + public void before() throws Exception { + HiveConf conf = new HiveConf(); + conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); + // disable json file writing + conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "60000m"); + + MetricsFactory.init(conf); + metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); + } + + @After + public void after() throws Exception { + MetricsFactory.close(); + } + + @Test + public void testRatioIsCalculated() throws Exception { + NumericVariable num = new NumericVariable(10); + NumericVariable ord = new NumericVariable(5); + + MetricsFactory.getInstance().addRatio("rat", num, ord); + String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", 2d); + } + + @Test + public void testRatioIsCalculatedNonExact() throws Exception { + NumericVariable num = new NumericVariable(20); + NumericVariable ord = new NumericVariable(3); + + MetricsFactory.getInstance().addRatio("rat", num, ord); + String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", 6.6666d, 1e-4); + } + + @Test(expected = IllegalArgumentException.class) + public void testMissingNumeratorRatio() throws Exception { + 
MetricsFactory.getInstance().addRatio("rat", null, new NumericVariable(5)); + } + + @Test(expected = IllegalArgumentException.class) + public void testMissingDenominatorRatio() throws Exception { + MetricsFactory.getInstance().addRatio("rat", new NumericVariable(5), null); + } + + @Test + public void testEmptyRatio() throws Exception { + NumericVariable num = new NumericVariable(null); + NumericVariable ord = new NumericVariable(null); + + MetricsFactory.getInstance().addRatio("rat", num, ord); + String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", "NaN"); + } + + @Test + public void testZeroRatio() throws Exception { + NumericVariable num = new NumericVariable(10); + NumericVariable ord = new NumericVariable(0); + + MetricsFactory.getInstance().addRatio("rat", num, ord); + String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", "NaN"); + } + + private class NumericVariable implements MetricsVariable { + + private final Integer value; + + public NumericVariable(Integer value) { + this.value = value; + } + + @Override + public Integer getValue() { + return value; + } + } +} diff --git service/src/java/org/apache/hive/service/cli/session/SessionManager.java service/src/java/org/apache/hive/service/cli/session/SessionManager.java index 76e759f..26c8812 100644 --- service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -34,6 +34,8 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import com.google.common.base.Predicate; +import com.google.common.collect.Iterables; import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; @@ -95,9 +97,68 @@ 
public synchronized void init(HiveConf hiveConf) { createBackgroundOperationPool(); addService(operationManager); initSessionImplClassName(); + Metrics metrics = MetricsFactory.getInstance(); + if(metrics != null){ + registerOpenSessionMetrics(metrics); + registerActiveSessionMetrics(metrics); + } super.init(hiveConf); } + private void registerOpenSessionMetrics(Metrics metrics) { + MetricsVariable openSessionCnt = new MetricsVariable() { + @Override + public Integer getValue() { + return getSessions().size(); + } + }; + MetricsVariable openSessionTime = new MetricsVariable() { + @Override + public Integer getValue() { + long sum = 0; + long currentTime = System.currentTimeMillis(); + for (HiveSession s : getSessions()) { + sum += currentTime - s.getCreationTime(); + } + // in case of an overflow return -1 + return (int) sum != sum ? -1 : (int) sum; + } + }; + metrics.addGauge(MetricsConstant.HS2_OPEN_SESSIONS, openSessionCnt); + metrics.addRatio(MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, openSessionTime, openSessionCnt); + } + + private void registerActiveSessionMetrics(Metrics metrics) { + MetricsVariable activeSessionCnt = new MetricsVariable() { + @Override + public Integer getValue() { + Iterable filtered = Iterables.filter(getSessions(), new Predicate<HiveSession>() { + @Override + public boolean apply(HiveSession hiveSession) { + return hiveSession.getNoOperationTime() == 0L; + } + }); + return Iterables.size(filtered); + } + }; + MetricsVariable activeSessionTime = new MetricsVariable() { + @Override + public Integer getValue() { + long sum = 0; + long currentTime = System.currentTimeMillis(); + for (HiveSession s : getSessions()) { + if (s.getNoOperationTime() == 0L) { + sum += currentTime - s.getLastAccessTime(); + } + } + // in case of an overflow return -1 + return (int) sum != sum ? 
-1 : (int) sum; + } + }; + metrics.addGauge(MetricsConstant.HS2_ACTIVE_SESSIONS, activeSessionCnt); + metrics.addRatio(MetricsConstant.HS2_AVG_ACTIVE_SESSION_TIME, activeSessionTime, activeSessionCnt); + } + private void initSessionImplClassName() { this.sessionImplclassName = hiveConf.getVar(ConfVars.HIVE_SESSION_IMPL_CLASSNAME); this.sessionImplWithUGIclassName = hiveConf.getVar(ConfVars.HIVE_SESSION_IMPL_WITH_UGI_CLASSNAME); @@ -208,6 +269,11 @@ public void run() { closeSession(handle); } catch (HiveSQLException e) { LOG.warn("Exception is thrown closing session " + handle, e); + } finally { + Metrics metrics = MetricsFactory.getInstance(); + if (metrics != null) { + metrics.incrementCounter(MetricsConstant.HS2_ABANDONED_SESSIONS); + } } } else { session.closeExpiredOperations(); diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java index 5511c54..cbcbeb0 100644 --- service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java +++ service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java @@ -18,32 +18,48 @@ package org.apache.hive.service.cli.session; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import org.apache.hadoop.hive.common.metrics.MetricsTestUtils; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics; import org.apache.hadoop.hive.common.metrics.metrics2.MetricsReporting; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.OperationType; +import 
org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; +import org.apache.hive.service.cli.TableSchema; +import org.apache.hive.service.cli.operation.MetadataOperation; +import org.apache.hive.service.cli.operation.OperationManager; +import org.apache.hive.service.rpc.thrift.TProtocolVersion; import org.apache.hive.service.server.HiveServer2; -import org.junit.BeforeClass; +import org.junit.Before; import org.junit.Test; -import java.io.File; +import java.util.HashMap; /** * Test metrics from SessionManager. */ public class TestSessionManagerMetrics { - private static SessionManager sm; - private static CodahaleMetrics metrics; + private SessionManager sm; + private CodahaleMetrics metrics; - @BeforeClass - public static void setup() throws Exception { + @Before + public void setup() throws Exception { HiveConf conf = new HiveConf(); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS, 2); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_WAIT_QUEUE_SIZE, 10); conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_KEEPALIVE_TIME, "1000000s"); + conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_IDLE_SESSION_TIMEOUT, "500ms"); + conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SESSION_CHECK_INTERVAL, "3s"); conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true); @@ -57,6 +73,9 @@ public static void setup() throws Exception { sm.init(conf); metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + + Hive doNothingHive = mock(Hive.class); + Hive.set(doNothingHive); } final Object barrier = new Object(); @@ -74,6 +93,34 @@ public void run() { } } + class BlockingOperation extends MetadataOperation { + + BlockingOperation(HiveSession parentSession, OperationType opType) { + super(parentSession, opType); + } + + @Override + protected void runInternal() throws HiveSQLException { + synchronized (barrier) { + try { + barrier.wait(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + 
@Override + public TableSchema getResultSetSchema() throws HiveSQLException { + return null; + } + + @Override + public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException { + return null; + } + } + /** * Tests metrics regarding async thread pool. */ @@ -97,4 +144,181 @@ public void testThreadPoolMetrics() throws Exception { MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.EXEC_ASYNC_POOL_SIZE, 2); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.EXEC_ASYNC_QUEUE_SIZE, 0); } + + @Test + public void testOpenSessionMetrics() throws Exception { + + String json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_OPEN_SESSIONS, 0); + + SessionHandle handle = + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_OPEN_SESSIONS, 1); + + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_OPEN_SESSIONS, 2); + + sm.closeSession(handle); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_OPEN_SESSIONS, 1); + } + + @Test + public void testOpenSessionTimeMetrics() throws Exception { + + String json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, + MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, "NaN"); + + long firstSessionOpen = System.currentTimeMillis(); + SessionHandle handle = + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, 
MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, + (double)(System.currentTimeMillis() - firstSessionOpen), 100d); + + long secondSessionOpen = System.currentTimeMillis(); + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, + (double)(System.currentTimeMillis() - firstSessionOpen + + System.currentTimeMillis() - secondSessionOpen) / 2d, 100d); + + sm.closeSession(handle); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, + (double)(System.currentTimeMillis() - secondSessionOpen), 100d); + + } + + @Test + public void testActiveSessionMetrics() throws Exception { + + String json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_ACTIVE_SESSIONS, 0); + + SessionHandle handle = + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + final HiveSession session = sm.getSession(handle); + OperationManager operationManager = mock(OperationManager.class); + when(operationManager. 
+ newGetTablesOperation(session, "catalog", "schema", "table", null)) + .thenReturn(new BlockingOperation(session, OperationType.GET_TABLES)); + session.setOperationManager(operationManager); + + new Thread(new Runnable() { + + @Override + public void run() { + try { + OperationHandle handle = session.getTables("catalog", "schema", "table", null); + session.closeOperation(handle); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + synchronized (barrier){ + barrier.notifyAll(); + } + } + } + }).start(); + + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_ACTIVE_SESSIONS, 1); + + synchronized (barrier) { + barrier.notifyAll(); + } + + synchronized (barrier){ + barrier.wait(); + } + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_ACTIVE_SESSIONS, 0); + } + + @Test + public void testActiveSessionTimeMetrics() throws Exception { + + String json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, + MetricsConstant.HS2_AVG_ACTIVE_SESSION_TIME, "NaN"); + + SessionHandle handle = + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + final HiveSession session = sm.getSession(handle); + OperationManager operationManager = mock(OperationManager.class); + when(operationManager. 
+ newGetTablesOperation(session, "catalog", "schema", "table", null)) + .thenReturn(new BlockingOperation(session, OperationType.GET_TABLES)); + session.setOperationManager(operationManager); + + long sessionActivateTime = System.currentTimeMillis(); + new Thread(new Runnable() { + + @Override + public void run() { + try { + OperationHandle handle = session.getTables("catalog", "schema", "table", null); + session.closeOperation(handle); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + synchronized (barrier){ + barrier.notifyAll(); + } + } + } + }).start(); + + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_ACTIVE_SESSION_TIME, + (double)System.currentTimeMillis() - sessionActivateTime, 100d); + + synchronized (barrier) { + barrier.notifyAll(); + } + + synchronized (barrier){ + barrier.wait(); + } + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, + MetricsConstant.HS2_AVG_ACTIVE_SESSION_TIME, "NaN"); + } + + + @Test + public void testAbandonedSessionMetrics() throws Exception { + + sm.start(); + String json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.HS2_ABANDONED_SESSIONS, ""); + + sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", + new HashMap()); + + Thread.sleep(3200); + + json = metrics.dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.HS2_ABANDONED_SESSIONS, 1); + } }