From 9a6fd897ea8198ba47f6a51a7039722c3eff5083 Mon Sep 17 00:00:00 2001 From: Ashu Pachauri Date: Mon, 2 Nov 2015 17:15:11 -0800 Subject: [PATCH] HBASE-14724: Per column family op-count metrics Adding per column family per region operation count metrics for the following operations: 1. Get 2. Mutate 3. Append 4. Delete 5. ScanNext 6. Increment --- .../java/org/apache/hadoop/hbase/util/Bytes.java | 14 ++++ .../hbase/regionserver/MetricsRegionSource.java | 37 +++++---- .../regionserver/MetricsRegionSourceImpl.java | 87 ++++++++++++++++++++-- .../apache/hadoop/hbase/regionserver/HRegion.java | 34 +++++++-- .../hadoop/hbase/regionserver/MetricsRegion.java | 25 ++++--- .../hadoop/hbase/regionserver/RSRpcServices.java | 17 ++++- .../hbase/regionserver/TestMetricsRegion.java | 36 +++++++++ 7 files changed, 208 insertions(+), 42 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 685e402..015f6a4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -29,6 +29,7 @@ import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.security.SecureRandom; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; @@ -532,6 +533,19 @@ public class Bytes implements Comparable { } /** + * Converts an iterable collection of utf8 encoded bytes to strings. + * @param bytes The collection of utf8 encoded bytes. + * @return List of strings where each string represents an element of the collection. + */ + public static List toString(Iterable bytes) { + List strings = new ArrayList<>(); + for(byte [] b: bytes) { + strings.add(toString(b)); + } + return strings; + } + + /** * @param b Presumed UTF-8 encoded byte array. 
* @return String made from b */ diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java index 874be31..18590f2 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.regionserver; +import java.util.List; /** * This interface will be implemented to allow single regions to push metrics into @@ -46,35 +47,43 @@ public interface MetricsRegionSource extends Comparable { void close(); /** - * Update related counts of puts. + * Update related count of Puts for the given column families. + * @param families Names of column families. */ - void updatePut(); + void updatePut(final List families); /** - * Update related counts of deletes. + * Update related count of deletes for the given column families. + * @param families Names of column families. */ - void updateDelete(); + void updateDelete(final List families); /** - * Update count and sizes of gets. - * @param getSize size in bytes of the resulting key values for a get + * Update total size of gets and their count for given column families. + * @param families Names of column families. + * @param getSize Size in bytes of the resulting key values for a get. */ - void updateGet(long getSize); + void updateGet(final List families, long getSize); /** - * Update the count and sizes of resultScanner.next() - * @param scanSize Size in bytes of the resulting key values for a next() + * Update count of increments for the given column families. + * @param families Names of column families. */ - void updateScan(long scanSize); + void updateIncrement(final List families); + /** - * Update related counts of increments.
+ * Update related count of appends for the given column families. + * @param families Names of column families. */ - void updateIncrement(); + void updateAppend(final List families); /** - * Update related counts of appends. + * Update size of scans and count of resultScanner.next() calls for the given column families. + * @param families Names of column families. + * @param scanSize Size in bytes of resulting key values for a next(). */ - void updateAppend(); + void updateScan(final List families, long scanSize); + /** * Get the aggregate source to which this reports. diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java index b31e71d..a6072e7 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java @@ -18,6 +18,11 @@ package org.apache.hadoop.hbase.regionserver; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.Log; @@ -46,6 +51,7 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource { private final DynamicMetricsRegistry registry; private final String regionNamePrefix; + private final String familyNamePrefix; private final String regionPutKey; private final String regionDeleteKey; private final String regionGetKey; @@ -59,8 +65,16 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource { private final MutableCounterLong regionAppend; private final MutableHistogram regionGet; private final MutableHistogram regionScanNext; + private final Map familyGetMap; + private final Map familyPutMap; + private final Map familyDeleteMap; + 
private final Map familyScanMap; + private final Map familyIncrMap; + private final Map familyAppendMap; private final int hashCode; + private final List NULL_FAMILY = Arrays.asList("unknown"); + public MetricsRegionSourceImpl(MetricsRegionWrapper regionWrapper, MetricsRegionAggregateSourceImpl aggregate) { this.regionWrapper = regionWrapper; @@ -77,6 +91,11 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource { "_region_" + regionWrapper.getRegionName() + "_metric_"; + familyNamePrefix = "Namespace" + regionWrapper.getNamespace() + + "_table_" + regionWrapper.getTableName() + + "_region_" + regionWrapper.getRegionName() + + "_cf_"; + String suffix = "Count"; regionPutKey = regionNamePrefix + MetricsRegionServerSource.MUTATE_KEY + suffix; @@ -97,6 +116,13 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource { regionScanNextKey = regionNamePrefix + MetricsRegionServerSource.SCAN_NEXT_KEY; regionScanNext = registry.newHistogram(regionScanNextKey); + familyGetMap = new ConcurrentHashMap<>(); + familyPutMap = new ConcurrentHashMap<>(); + familyAppendMap = new ConcurrentHashMap<>(); + familyDeleteMap = new ConcurrentHashMap<>(); + familyIncrMap = new ConcurrentHashMap<>(); + familyScanMap = new ConcurrentHashMap<>(); + hashCode = regionWrapper.getRegionHashCode(); } @@ -129,38 +155,87 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource { registry.removeHistogramMetrics(regionGetKey); registry.removeHistogramMetrics(regionScanNextKey); + removeMetrics(familyGetMap.keySet()); + removeMetrics(familyPutMap.keySet()); + removeMetrics(familyAppendMap.keySet()); + removeMetrics(familyDeleteMap.keySet()); + removeMetrics(familyIncrMap.keySet()); + removeMetrics(familyScanMap.keySet()); + regionWrapper = null; } } + private void removeMetrics(Iterable metrics) { + for (String m: metrics) { + registry.removeMetric(m); + } + } + @Override - public void updatePut() { + public void updatePut(List families) { regionPut.incr(); + 
incrementFamilyNumOps(families, familyPutMap, MetricsRegionServerSource.MUTATE_KEY); } @Override - public void updateDelete() { + public void updateDelete(List families) { regionDelete.incr(); + incrementFamilyNumOps(families, familyDeleteMap, MetricsRegionServerSource.DELETE_KEY); } @Override - public void updateGet(long getSize) { + public void updateGet(List families, long getSize) { regionGet.add(getSize); + incrementFamilyNumOps(families, familyGetMap, MetricsRegionServerSource.GET_KEY); } @Override - public void updateScan(long scanSize) { + public void updateScan(List families, long scanSize) { regionScanNext.add(scanSize); + incrementFamilyNumOps(families, familyScanMap, MetricsRegionServerSource.SCAN_NEXT_KEY); } @Override - public void updateIncrement() { + public void updateIncrement(List families) { regionIncrement.incr(); + incrementFamilyNumOps(families, familyIncrMap, MetricsRegionServerSource.INCREMENT_KEY); } @Override - public void updateAppend() { + public void updateAppend(List families) { regionAppend.incr(); + incrementFamilyNumOps(families, familyAppendMap, MetricsRegionServerSource.APPEND_KEY); + } + + private void incrementFamilyNumOps(List families, + Map familyMap, String opKey) { + if (families == null) { + families = NULL_FAMILY; + } + for(String family: families) { + MutableCounterLong familyOpCount = familyMap.get(family); + if (familyOpCount != null) { + familyOpCount.incr(); + } else { + addNewFamily(family, familyMap, opKey); + familyOpCount = familyMap.get(family); + familyOpCount.incr(); + } + } + } + + private void addNewFamily(String family, + Map familyMap, String opKey) { + + synchronized (familyMap) { + if(!familyMap.containsKey(family)) { + String familyOpKey = familyNamePrefix + family + + "_metric_" + opKey + "Count"; + MutableCounterLong count = registry.getLongCounter(familyOpKey, 0L); + familyMap.put(family, count); + } + } } @Override diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index e987bc6..c5ef3fe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -3234,13 +3234,21 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (noOfPuts > 0) { // There were some Puts in the batch. if (this.metricsRegion != null) { - this.metricsRegion.updatePut(); + List familyNames = null; + if (putsCfSetConsistent) { + familyNames = Bytes.toString(putsCfSet); + } + this.metricsRegion.updatePut(familyNames); } } if (noOfDeletes > 0) { // There were some Deletes in the batch. if (this.metricsRegion != null) { - this.metricsRegion.updateDelete(); + List familyNames = null; + if (deletesCfSetConsistent) { + familyNames = Bytes.toString(deletesCfSet); + } + this.metricsRegion.updateDelete(familyNames); } } if (!success) { @@ -6644,12 +6652,13 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi coprocessorHost.postGet(get, results); } - metricsUpdateForGet(results); + Set familySet = get.familySet(); + metricsUpdateForGet(familySet, results); return results; } - void metricsUpdateForGet(List results) { + void metricsUpdateForGet(Set familySet, List results) { if (this.metricsRegion != null) { long totalSize = 0L; for (Cell cell : results) { @@ -6657,7 +6666,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // to know the serialization of how the codec works with it?? 
totalSize += CellUtil.estimatedSerializedSizeOf(cell); } - this.metricsRegion.updateGet(totalSize); + if (familySet != null && familySet.size() == 0) { + familySet = this.getTableDesc().getFamiliesKeys(); + } + + List familyNames = Bytes.toString(familySet); + this.metricsRegion.updateGet(familyNames, totalSize); } } @@ -7203,7 +7217,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } if (this.metricsRegion != null) { - this.metricsRegion.updateAppend(); + Set familySet = mutate.getFamilyCellMap().keySet(); + List familyNames = Bytes.toString(familySet); + this.metricsRegion.updateAppend(familyNames); } if (flush) { @@ -7232,7 +7248,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi Operation op = Operation.INCREMENT; byte [] row = mutation.getRow(); checkRow(row, op.toString()); - checkFamilies(mutation.getFamilyCellMap().keySet()); + Set familySet = mutation.getFamilyCellMap().keySet(); + checkFamilies(familySet); boolean flush = false; Durability durability = getEffectiveDurability(mutation.getDurability()); boolean writeToWAL = durability != Durability.SKIP_WAL; @@ -7427,7 +7444,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } closeRegionOperation(Operation.INCREMENT); if (this.metricsRegion != null) { - this.metricsRegion.updateIncrement(); + List familyNames = Bytes.toString(familySet); + this.metricsRegion.updateIncrement(familyNames); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegion.java index 48395a3..0392740 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegion.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.classification.InterfaceAudience; 
import org.apache.hadoop.hbase.CompatibilitySingletonFactory; +import java.util.List; /** * This is the glue between the HRegion and whatever hadoop shim layer @@ -41,28 +42,28 @@ public class MetricsRegion { source.close(); } - public void updatePut() { - source.updatePut(); + public void updateGet(final List families, final long getSize) { + source.updateGet(families, getSize); } - public void updateDelete() { - source.updateDelete(); + public void updatePut(final List families) { + source.updatePut(families); } - public void updateGet(final long getSize) { - source.updateGet(getSize); + public void updateDelete(final List families) { + source.updateDelete(families); } - public void updateScanNext(final long scanSize) { - source.updateScan(scanSize); + public void updateScanNext(final List families, final long scanSize) { + source.updateScan(families, scanSize); } - public void updateAppend() { - source.updateAppend(); + public void updateAppend(final List families) { + source.updateAppend(families); } - public void updateIncrement() { - source.updateIncrement(); + public void updateIncrement(final List families) { + source.updateIncrement(families); } MetricsRegionSource getSource() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 5729334..167c98f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -24,6 +24,7 @@ import java.net.BindException; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -228,6 +229,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler, private final AtomicLong scannerIdGen = new AtomicLong(0L); 
private final ConcurrentHashMap scanners = new ConcurrentHashMap(); + private final ConcurrentHashMap > scannerFamilies = + new ConcurrentHashMap<>(); /** * The lease timeout period for client scanners (milliseconds). @@ -355,6 +358,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, @Override public void leaseExpired() { RegionScannerHolder rsh = scanners.remove(this.scannerName); + scannerFamilies.remove(this.scannerName); if (rsh != null) { RegionScanner s = rsh.s; LOG.info("Scanner " + this.scannerName + " lease expired on region " @@ -2078,7 +2082,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, if (region.getCoprocessorHost() != null) { region.getCoprocessorHost().postGet(get, results); } - region.metricsUpdateForGet(results); + region.metricsUpdateForGet(get.familySet(), results); return Result.create(results, get.isCheckExistenceOnly() ? !results.isEmpty() : null, stale); } @@ -2352,6 +2356,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, boolean moreResults = true; boolean closeScanner = false; boolean isSmallScan = false; + List scanFamilies = null; RegionScanner actualRegionScanner = null; ScanResponse.Builder builder = ScanResponse.newBuilder(); if (request.hasCloseScanner()) { @@ -2363,6 +2368,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } if (request.hasScannerId()) { rsh = scanners.get(scannerName); + scanFamilies = scannerFamilies.get(request.getScannerId()); if (rsh == null) { LOG.info("Client tried to access missing scanner " + scannerName); throw new UnknownScannerException( @@ -2415,6 +2421,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler, scannerId = this.scannerIdGen.incrementAndGet(); scannerName = String.valueOf(scannerId); rsh = addScanner(scannerName, scanner, region); + + byte [][] families = scan.getFamilies(); + scanFamilies = Bytes.toString(Arrays.asList(families)); + scannerFamilies.put(scannerId, scanFamilies); ttl = this.scannerLeaseTimeoutPeriod; } 
assert scanner != null; @@ -2610,7 +2620,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } } region.updateReadRequestsCount(i); - region.getMetrics().updateScanNext(totalCellSize); + region.getMetrics().updateScanNext(scanFamilies, totalCellSize); + if (regionServer.metricsRegionServer != null) { regionServer.metricsRegionServer.updateScannerNext(totalCellSize); } @@ -2668,6 +2679,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } } rsh = scanners.remove(scannerName); + scannerFamilies.remove(scannerName); if (rsh != null) { if (context != null) { context.setCallBack(rsh.closeCallBack); @@ -2697,6 +2709,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } catch (IOException ie) { if (scannerName != null && ie instanceof NotServingRegionException) { RegionScannerHolder rsh = scanners.remove(scannerName); + scannerFamilies.remove(scannerName); if (rsh != null) { try { RegionScanner scanner = rsh.s; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java index e739890..94aa599 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java @@ -25,6 +25,9 @@ import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.junit.Test; import org.junit.experimental.categories.Category; +import java.util.Arrays; +import java.util.List; + @Category({RegionServerTests.class, SmallTests.class}) public class TestMetricsRegion { @@ -67,4 +70,37 @@ public class TestMetricsRegion { 1, agg); mr.close(); } + + @Test + public void testRegionMetrics() { + MetricsRegion mr = new MetricsRegion(new MetricsRegionWrapperStub()); + MetricsRegionAggregateSource agg = mr.getSource().getAggregateSource(); + + String prefix = 
"namespace_TestNS_table_MetricsRegionWrapperStub_region_DEADBEEF001_cf_"; + List cfs = Arrays.asList("cf1", "cf2"); + + mr.updateGet(cfs, 1000); + HELPER.assertCounter( prefix + "cf1_metric_getCount", 1, agg); + + mr.updatePut(cfs); + mr.updatePut(null); + HELPER.assertCounter(prefix + "cf1_metric_mutateCount", 1, agg); + HELPER.assertCounter(prefix + "unknown_metric_mutateCount", 1, agg); + + mr.updateAppend(cfs.subList(0,1)); + HELPER.assertCounter(prefix + "cf1_metric_appendCount", 1, agg); + + mr.updateScanNext(null, 1000); + HELPER.assertCounter(prefix + "unknown_metric_scanNextCount", 1, agg); + + mr.updateDelete(cfs); + HELPER.assertCounter(prefix + "cf1_metric_deleteCount", 1, agg); + + mr.updateIncrement(null); + mr.updateIncrement(cfs); + mr.updateIncrement(cfs.subList(0, 1)); + HELPER.assertCounter(prefix + "unknown_metric_incrementCount", 1, agg); + HELPER.assertCounter(prefix + "cf1_metric_incrementCount", 2, agg); + HELPER.assertCounter(prefix + "cf2_metric_incrementCount", 1, agg); + } } -- 1.9.5