From 993c519fd5696374e1485e9003fe382153ebfa25 Mon Sep 17 00:00:00 2001
From: Karan Mehta
Date: Wed, 1 Mar 2017 16:48:50 -0800
Subject: [PATCH] HBASE-17716 Formalize Scan Metric names

---
 .../hadoop/hbase/client/ScannerCallable.java       |  5 ++--
 .../hadoop/hbase/client/metrics/MetricType.java    | 26 +++++++++++++++++++
 .../hadoop/hbase/client/metrics/ScanMetrics.java   | 18 ++++++-------
 .../client/metrics/ServerSideScanMetrics.java      | 30 ++++++++++------------
 .../apache/hadoop/hbase/protobuf/ProtobufUtil.java |  3 ++-
 .../hadoop/hbase/shaded/protobuf/ProtobufUtil.java |  9 ++++---
 .../hbase/shaded/protobuf/ResponseConverter.java   |  7 ++---
 .../hbase/mapreduce/TableRecordReaderImpl.java     |  4 ++-
 .../hadoop/hbase/regionserver/RSRpcServices.java   |  7 ++---
 .../TestServerSideScanMetricsFromClientSide.java   | 25 +++++++++---------
 10 files changed, 83 insertions(+), 51 deletions(-)
 create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/MetricType.java

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
index 0682a7a..24dc4ff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.exceptions.ScannerResetException;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@@ -342,8 +343,8 @@ public class ScannerCallable extends ClientServiceCallable<Result> {
   private void updateServerSideMetrics(ScanResponse response) {
     if (this.scanMetrics == null || response == null || !response.hasScanMetrics()) return;
 
-    Map<String, Long> serverMetrics = ResponseConverter.getScanMetrics(response);
-    for (Entry<String, Long> entry : serverMetrics.entrySet()) {
+    Map<MetricType, Long> serverMetrics = ResponseConverter.getScanMetrics(response);
+    for (Entry<MetricType, Long> entry : serverMetrics.entrySet()) {
       this.scanMetrics.addToCounter(entry.getKey(), entry.getValue());
     }
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/MetricType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/MetricType.java
new file mode 100644
index 0000000..eb7a1f3
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/MetricType.java
@@ -0,0 +1,26 @@
+package org.apache.hadoop.hbase.client.metrics;
+
+public enum MetricType {
+
+  RPC_CALLS("Number of RPC calls"),
+  REMOTE_RPC_CALLS("Number of remote RPC calls"),
+  MILLIS_BETWEEN_NEXTS("Sum of milliseconds between sequential next calls"),
+  NOT_SERVING_REGION_EXCEPTION("Number of NotServingRegionException caught"),
+  BYTES_IN_RESULTS("Number of bytes in Result objects from region servers"),
+  BYTES_IN_REMOTE_RESULTS("Number of bytes in Result objects from remote region servers"),
+  REGIONS_SCANNED("Number of regions scanned"),
+  RPC_RETRIES("Number of RPC retries"),
+  REMOTE_RPC_RETRIES("Number of remote RPC retries"),
+  ROWS_SCANNED("Number of rows scanned"),
+  ROWS_FILTERED("Number of rows filtered");
+
+  private final String description;
+
+  private MetricType(String description) {
+    this.description = description;
+  }
+
+  public String description() {
+    return description;
+  }
+}
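A note on what the new enum buys callers: the constants above are the same string keys that were previously scattered as literals ("RPC_CALLS", "ROWS_SCANNED", ...), now formalized so that a typo fails at compile time instead of silently returning a null counter. A minimal before/after sketch from a caller's point of view (the scanMetrics variable is a hypothetical ScanMetrics instance, not part of this diff):

    // Before this patch: string keys; a misspelled name compiles fine
    // and getCounter() simply returns null at runtime.
    AtomicLong scanned = scanMetrics.getCounter("ROWS_SCANNED");

    // After this patch: enum keys; a misspelled constant does not compile.
    AtomicLong scannedNow = scanMetrics.getCounter(MetricType.ROWS_SCANNED);
    System.out.println(MetricType.ROWS_SCANNED.description() + ": " + scannedNow.get());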
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
index ec2c937..a7c5bd4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
@@ -47,47 +47,47 @@ public class ScanMetrics extends ServerSideScanMetrics {
   /**
    * number of RPC calls
    */
-  public final AtomicLong countOfRPCcalls = createCounter("RPC_CALLS");
+  public final AtomicLong countOfRPCcalls = createCounter(MetricType.RPC_CALLS);
 
   /**
    * number of remote RPC calls
    */
-  public final AtomicLong countOfRemoteRPCcalls = createCounter("REMOTE_RPC_CALLS");
+  public final AtomicLong countOfRemoteRPCcalls = createCounter(MetricType.REMOTE_RPC_CALLS);
 
   /**
    * sum of milliseconds between sequential next calls
    */
-  public final AtomicLong sumOfMillisSecBetweenNexts = createCounter("MILLIS_BETWEEN_NEXTS");
+  public final AtomicLong sumOfMillisSecBetweenNexts = createCounter(MetricType.MILLIS_BETWEEN_NEXTS);
 
   /**
    * number of NotServingRegionException caught
    */
-  public final AtomicLong countOfNSRE = createCounter("NOT_SERVING_REGION_EXCEPTION");
+  public final AtomicLong countOfNSRE = createCounter(MetricType.NOT_SERVING_REGION_EXCEPTION);
 
   /**
    * number of bytes in Result objects from region servers
    */
-  public final AtomicLong countOfBytesInResults = createCounter("BYTES_IN_RESULTS");
+  public final AtomicLong countOfBytesInResults = createCounter(MetricType.BYTES_IN_RESULTS);
 
   /**
    * number of bytes in Result objects from remote region servers
    */
-  public final AtomicLong countOfBytesInRemoteResults = createCounter("BYTES_IN_REMOTE_RESULTS");
+  public final AtomicLong countOfBytesInRemoteResults = createCounter(MetricType.BYTES_IN_REMOTE_RESULTS);
 
   /**
    * number of regions
    */
-  public final AtomicLong countOfRegions = createCounter("REGIONS_SCANNED");
+  public final AtomicLong countOfRegions = createCounter(MetricType.REGIONS_SCANNED);
 
   /**
    * number of RPC retries
   */
-  public final AtomicLong countOfRPCRetries = createCounter("RPC_RETRIES");
+  public final AtomicLong countOfRPCRetries = createCounter(MetricType.RPC_RETRIES);
 
   /**
    * number of remote RPC retries
    */
-  public final AtomicLong countOfRemoteRPCRetries = createCounter("REMOTE_RPC_RETRIES");
+  public final AtomicLong countOfRemoteRPCRetries = createCounter(MetricType.REMOTE_RPC_RETRIES);
 
   /**
    * constructor
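Since every ScanMetrics counter is now registered under a MetricType, reporting code can walk the typed map and use the attached descriptions instead of maintaining its own name table. An illustrative sketch only (the println target is arbitrary, and note that getMetricsMap() consumes the counters, as the javadoc in the next file states):

    for (Map.Entry<MetricType, Long> e : scanMetrics.getMetricsMap().entrySet()) {
      // name() is the stable wire key; description() is the human-readable text.
      System.out.println(e.getKey().name() + " (" + e.getKey().description() + ") = " + e.getValue());
    }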
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
index 46b67d4..bff3b9d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase.client.metrics;
 
-import java.util.HashMap;
+import java.util.EnumMap;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -35,38 +36,35 @@ public class ServerSideScanMetrics {
   /**
-   * Hash to hold the String -> Atomic Long mappings for each metric
+   * Map to hold the MetricType -> AtomicLong mappings for each metric
    */
-  private final Map<String, AtomicLong> counters = new HashMap<String, AtomicLong>();
+  private final Map<MetricType, AtomicLong> counters = new EnumMap<MetricType, AtomicLong>(MetricType.class);
 
   /**
    * Create a new counter with the specified name
-   * @param counterName
+   * @param counterName the {@link MetricType} of the counter to create
    * @return {@link AtomicLong} instance for the counter with counterName
    */
-  protected AtomicLong createCounter(String counterName) {
+  protected AtomicLong createCounter(MetricType counterName) {
     AtomicLong c = new AtomicLong(0);
     counters.put(counterName, c);
     return c;
   }
 
-  public static final String COUNT_OF_ROWS_SCANNED_KEY = "ROWS_SCANNED";
-  public static final String COUNT_OF_ROWS_FILTERED_KEY = "ROWS_FILTERED";
-
   /**
    * number of rows filtered during scan RPC
    */
-  public final AtomicLong countOfRowsFiltered = createCounter(COUNT_OF_ROWS_FILTERED_KEY);
+  public final AtomicLong countOfRowsFiltered = createCounter(MetricType.ROWS_FILTERED);
 
   /**
    * number of rows scanned during scan RPC. Not every row scanned will be returned to the client
    * since rows may be filtered.
    */
-  public final AtomicLong countOfRowsScanned = createCounter(COUNT_OF_ROWS_SCANNED_KEY);
+  public final AtomicLong countOfRowsScanned = createCounter(MetricType.ROWS_SCANNED);
 
   /**
    * @param counterName
    * @param value
    */
-  public void setCounter(String counterName, long value) {
+  public void setCounter(MetricType counterName, long value) {
     AtomicLong c = this.counters.get(counterName);
     if (c != null) {
       c.set(value);
@@ -77,7 +75,7 @@ public class ServerSideScanMetrics {
    * @param counterName
    * @return true if a counter exists with the counterName
    */
-  public boolean hasCounter(String counterName) {
+  public boolean hasCounter(MetricType counterName) {
     return this.counters.containsKey(counterName);
   }
 
@@ -85,7 +83,7 @@
    * @param counterName
    * @return {@link AtomicLong} instance for this counter name, null if counter does not exist.
    */
-  public AtomicLong getCounter(String counterName) {
+  public AtomicLong getCounter(MetricType counterName) {
     return this.counters.get(counterName);
   }
 
@@ -93,7 +91,7 @@
    * @param counterName
    * @param delta
    */
-  public void addToCounter(String counterName, long delta) {
+  public void addToCounter(MetricType counterName, long delta) {
     AtomicLong c = this.counters.get(counterName);
     if (c != null) {
       c.addAndGet(delta);
@@ -105,11 +103,11 @@
    * reset all AtomicLongs in the instance back to 0.
-   * @return A Map of String -> Long for metrics
+   * @return A Map of MetricType -> Long for metrics
    */
-  public Map<String, Long> getMetricsMap() {
+  public Map<MetricType, Long> getMetricsMap() {
     // Create a builder
-    ImmutableMap.Builder<String, Long> builder = ImmutableMap.builder();
+    ImmutableMap.Builder<MetricType, Long> builder = ImmutableMap.builder();
     // For every entry add the value and reset the AtomicLong back to zero
-    for (Map.Entry<String, AtomicLong> e : this.counters.entrySet()) {
+    for (Entry<MetricType, AtomicLong> e : this.counters.entrySet()) {
       builder.put(e.getKey(), e.getValue().getAndSet(0));
     }
     // Build the immutable map so that people can't mess around with it.
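One behavioral detail worth spelling out, since it is easy to miss in the hunk above: getMetricsMap() snapshots every counter and resets it to zero, so it is a consume-once read. A hedged sketch, assuming a ServerSideScanMetrics instance named metrics that has already accumulated values:

    Map<MetricType, Long> first = metrics.getMetricsMap();
    long rowsScanned = first.get(MetricType.ROWS_SCANNED); // observed value

    // The first call reset the AtomicLongs, so a second snapshot reads zeros.
    Map<MetricType, Long> second = metrics.getMetricsMap();
    assert second.get(MetricType.ROWS_SCANNED) == 0L;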
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 52ee8a5..25d803b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.SnapshotType;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
@@ -1646,7 +1647,7 @@ public final class ProtobufUtil {
     if (pScanMetrics != null) {
       for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
         if (pair.hasName() && pair.hasValue()) {
-          scanMetrics.setCounter(pair.getName(), pair.getValue());
+          scanMetrics.setCounter(MetricType.valueOf(pair.getName()), pair.getValue());
         }
       }
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 271a0de..46a095c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -77,6 +77,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Scan.ReadType;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.SnapshotType;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -2045,7 +2046,7 @@ public final class ProtobufUtil {
     if (pScanMetrics != null) {
       for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
         if (pair.hasName() && pair.hasValue()) {
-          scanMetrics.setCounter(pair.getName(), pair.getValue());
+          scanMetrics.setCounter(MetricType.valueOf(pair.getName()), pair.getValue());
         }
       }
     }
@@ -2054,11 +2055,11 @@ public final class ProtobufUtil {
 
   public static MapReduceProtos.ScanMetrics toScanMetrics(ScanMetrics scanMetrics) {
     MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
-    Map<String, Long> metrics = scanMetrics.getMetricsMap();
-    for (Entry<String, Long> e : metrics.entrySet()) {
+    Map<MetricType, Long> metrics = scanMetrics.getMetricsMap();
+    for (Entry<MetricType, Long> e : metrics.entrySet()) {
       HBaseProtos.NameInt64Pair nameInt64Pair = HBaseProtos.NameInt64Pair.newBuilder()
-          .setName(e.getKey())
+          .setName(e.getKey().name())
           .setValue(e.getValue())
           .build();
       builder.addMetrics(nameInt64Pair);
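The wire format is deliberately unchanged by this patch: NameInt64Pair still carries the metric name as a string, and the enum's name()/valueOf() pair does the translation at the serialization boundary. A sketch of the round trip as the hunks above perform it (the value is made up):

    // Serialize: enum -> string name on the wire.
    HBaseProtos.NameInt64Pair pair = HBaseProtos.NameInt64Pair.newBuilder()
        .setName(MetricType.ROWS_FILTERED.name()) // "ROWS_FILTERED"
        .setValue(42L)
        .build();

    // Deserialize: string name -> enum. Note that MetricType.valueOf() throws
    // IllegalArgumentException for a name this client does not know, e.g. a
    // metric introduced by a newer server, so the mapping assumes matching versions.
    MetricType key = MetricType.valueOf(pair.getName());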
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
index cbcad80..a6493ad 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.SingleResponse;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionForSplitOrMergeResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
@@ -425,8 +426,8 @@ public final class ResponseConverter {
     return results;
   }
 
-  public static Map<String, Long> getScanMetrics(ScanResponse response) {
-    Map<String, Long> metricMap = new HashMap<String, Long>();
+  public static Map<MetricType, Long> getScanMetrics(ScanResponse response) {
+    Map<MetricType, Long> metricMap = new HashMap<MetricType, Long>();
     if (response == null || !response.hasScanMetrics() || response.getScanMetrics() == null) {
       return metricMap;
     }
@@ -436,7 +437,7 @@ public final class ResponseConverter {
     for (int i = 0; i < numberOfMetrics; i++) {
       NameInt64Pair metricPair = metrics.getMetrics(i);
       if (metricPair != null) {
-        String name = metricPair.getName();
+        MetricType name = MetricType.valueOf(metricPair.getName());
         Long value = metricPair.getValue();
         if (name != null && value != null) {
           metricMap.put(name, value);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
index 6f1d140..4540723 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.ScannerCallable;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -289,7 +291,7 @@ public class TableRecordReaderImpl {
     }
 
     try {
-      for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
+      for (Entry<MetricType, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
        Counter ct = (Counter)getCounter.invoke(context,
-          HBASE_COUNTER_GROUP_NAME, entry.getKey());
+          HBASE_COUNTER_GROUP_NAME, entry.getKey().name());
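Because the MapReduce counter API is string-keyed, the enum has to be converted back to its name at this boundary, which is what the entry.getKey().name() call above does (the reflective getCounter would otherwise be handed a MetricType and fail at invoke time). For clarity, the same loop with the reflection replaced by a direct call; context stands in for a TaskAttemptContext and is assumed, not part of this diff:

    for (Map.Entry<MetricType, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
      // Counter group and name are plain strings in the MapReduce API.
      Counter ct = context.getCounter(HBASE_COUNTER_GROUP_NAME, entry.getKey().name());
      ct.increment(entry.getValue());
    }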
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index e6c2a49..6138385 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -75,6 +75,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.VersionInfoUtil;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.conf.ConfigurationObserver;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
@@ -2890,12 +2891,12 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
       // Check to see if the client requested that we track metrics server side. If the
       // client requested metrics, retrieve the metrics from the scanner context.
       if (trackMetrics) {
-        Map<String, Long> metrics = scannerContext.getMetrics().getMetricsMap();
+        Map<MetricType, Long> metrics = scannerContext.getMetrics().getMetricsMap();
         ScanMetrics.Builder metricBuilder = ScanMetrics.newBuilder();
         NameInt64Pair.Builder pairBuilder = NameInt64Pair.newBuilder();
 
-        for (Entry<String, Long> entry : metrics.entrySet()) {
-          pairBuilder.setName(entry.getKey());
+        for (Entry<MetricType, Long> entry : metrics.entrySet()) {
+          pairBuilder.setName(entry.getKey().name());
           pairBuilder.setValue(entry.getValue());
           metricBuilder.addMetrics(pairBuilder.build());
         }
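End to end, the flow that the test below exercises looks like this from an application's point of view; a minimal sketch in which table is a placeholder for an open Table instance:

    Scan scan = new Scan();
    scan.setScanMetricsEnabled(true); // ask client and server to track metrics

    try (ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        // Drain the scan; metrics accumulate as the RPCs complete.
      }
    }

    ScanMetrics metrics = scan.getScanMetrics();
    long rowsScanned = metrics.getCounter(MetricType.ROWS_SCANNED).get();
    long rowsFiltered = metrics.getCounter(MetricType.ROWS_FILTERED).get();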
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
index b516cbb..682d1d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.metrics.MetricType;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.client.metrics.ServerSideScanMetrics;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -188,41 +189,41 @@ public class TestServerSideScanMetricsFromClientSide {
   public void testRowsSeenMetric(Scan baseScan) throws Exception {
     Scan scan;
     scan = new Scan(baseScan);
-    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, NUM_ROWS);
+    testMetric(scan, MetricType.ROWS_SCANNED, NUM_ROWS);
 
     for (int i = 0; i < ROWS.length - 1; i++) {
       scan = new Scan(baseScan);
       scan.setStartRow(ROWS[0]);
       scan.setStopRow(ROWS[i + 1]);
-      testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, i + 1);
+      testMetric(scan, MetricType.ROWS_SCANNED, i + 1);
     }
 
     for (int i = ROWS.length - 1; i > 0; i--) {
       scan = new Scan(baseScan);
       scan.setStartRow(ROWS[i - 1]);
       scan.setStopRow(ROWS[ROWS.length - 1]);
-      testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, ROWS.length - i);
+      testMetric(scan, MetricType.ROWS_SCANNED, ROWS.length - i);
     }
 
     // The filter should filter out all rows, but we still expect to see every row.
     Filter filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator("xyz".getBytes()));
     scan = new Scan(baseScan);
     scan.setFilter(filter);
-    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, ROWS.length);
+    testMetric(scan, MetricType.ROWS_SCANNED, ROWS.length);
 
     // Filter should pass on all rows
     SingleColumnValueFilter singleColumnValueFilter =
        new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOp.EQUAL, VALUE);
     scan = new Scan(baseScan);
     scan.setFilter(singleColumnValueFilter);
-    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, ROWS.length);
+    testMetric(scan, MetricType.ROWS_SCANNED, ROWS.length);
 
     // Filter should filter out all rows
     singleColumnValueFilter =
        new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOp.NOT_EQUAL, VALUE);
     scan = new Scan(baseScan);
     scan.setFilter(singleColumnValueFilter);
-    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY, ROWS.length);
+    testMetric(scan, MetricType.ROWS_SCANNED, ROWS.length);
   }
 
   @Test
@@ -305,17 +306,17 @@ public class TestServerSideScanMetricsFromClientSide {
       throws Exception {
     Scan scan = new Scan(baseScan);
     if (filter != null) scan.setFilter(filter);
-    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_FILTERED_KEY, expectedNumFiltered);
+    testMetric(scan, MetricType.ROWS_FILTERED, expectedNumFiltered);
   }
 
   /**
-   * Run the scan to completetion and check the metric against the specified value
+   * Run the scan to completion and check the metric against the specified value
    * @param scan
-   * @param metricKey
+   * @param metricType
    * @param expectedValue
    * @throws Exception
    */
-  public void testMetric(Scan scan, String metricKey, long expectedValue) throws Exception {
+  public void testMetric(Scan scan, MetricType metricType, long expectedValue) throws Exception {
     assertTrue("Scan should be configured to record metrics", scan.isScanMetricsEnabled());
     ResultScanner scanner = TABLE.getScanner(scan);
 
@@ -325,9 +326,9 @@ public class TestServerSideScanMetricsFromClientSide {
     scanner.close();
     ScanMetrics metrics = scan.getScanMetrics();
     assertTrue("Metrics are null", metrics != null);
-    assertTrue("Metric : " + metricKey + " does not exist", metrics.hasCounter(metricKey));
-    final long actualMetricValue = metrics.getCounter(metricKey).get();
-    assertEquals("Metric: " + metricKey + " Expected: " + expectedValue + " Actual: "
+    assertTrue("Metric : " + metricType.name() + " does not exist", metrics.hasCounter(metricType));
+    final long actualMetricValue = metrics.getCounter(metricType).get();
+    assertEquals("Metric: " + metricType.description() + " Expected: " + expectedValue + " Actual: "
        + actualMetricValue, expectedValue, actualMetricValue);
   }
-- 
2.10.1 (Apple Git-78)