diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java index 80b5cd2..a4d6358 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java @@ -207,12 +207,23 @@ public interface MetricsRegionServerSource extends BaseSource { String BLOCK_CACHE_SIZE_DESC = "Size of the block cache."; String BLOCK_CACHE_HIT_COUNT = "blockCacheHitCount"; String BLOCK_CACHE_HIT_COUNT_DESC = "Count of the hit on the block cache."; + String BLOCK_CACHE_PRIMARY_HIT_COUNT = "primary.blockCacheHitCount"; + String BLOCK_CACHE_PRIMARY_HIT_COUNT_DESC = "Count of hit on primary replica in the block cache."; String BLOCK_CACHE_MISS_COUNT = "blockCacheMissCount"; String BLOCK_COUNT_MISS_COUNT_DESC = "Number of requests for a block that missed the block cache."; + String BLOCK_CACHE_PRIMARY_MISS_COUNT = "primary.blockCacheMissCount"; + String BLOCK_COUNT_PRIMARY_MISS_COUNT_DESC = + "Number of requests for a block of primary replica that missed the block cache."; String BLOCK_CACHE_EVICTION_COUNT = "blockCacheEvictionCount"; String BLOCK_CACHE_EVICTION_COUNT_DESC = "Count of the number of blocks evicted from the block cache."; + String L1_BLOCK_CACHE_EVICTION_COUNT = "l1.blockCacheEvictionCount"; + String L1_BLOCK_CACHE_EVICTION_COUNT_DESC = + "Count of the number of blocks evicted from the L1 block cache."; + String BLOCK_CACHE_PRIMARY_EVICTION_COUNT = "primary.blockCacheEvictionCount"; + String BLOCK_CACHE_PRIMARY_EVICTION_COUNT_DESC = + "Count of the number of blocks evicted from primary replica in the block cache."; String BLOCK_CACHE_HIT_PERCENT = "blockCacheCountHitPercent"; String BLOCK_CACHE_HIT_PERCENT_DESC = "Percent of block cache requests that are hits"; diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java index f2bd8ff..c5cc276 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java @@ -198,16 +198,37 @@ public interface MetricsRegionServerWrapper { long getBlockCacheHitCount(); /** + * Get the count of hits to primary replica in the block cache + */ + long getBlockCachePrimaryHitCount(); + + /** * Get the count of misses to the block cache. */ long getBlockCacheMissCount(); /** + * Get the count of misses to primary replica in the block cache. + */ + long getBlockCachePrimaryMissCount(); + + /** * Get the number of items evicted from the block cache. */ long getBlockCacheEvictedCount(); /** + * Get the number of items evicted from primary replica in the block cache. + */ + long getBlockCachePrimaryEvictedCount(); + + /** + * Get the number of items evicted from the L1 block cache. + */ + long getL1BlockCacheEvictedCount(); + + + /** * Get the percent of all requests that hit the block cache. 
*/ double getBlockCacheHitPercent(); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java index 2ae8f4d..c37f8cb 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java @@ -236,10 +236,18 @@ public class MetricsRegionServerSourceImpl rsWrap.getBlockCacheSize()) .addCounter(Interns.info(BLOCK_CACHE_HIT_COUNT, BLOCK_CACHE_HIT_COUNT_DESC), rsWrap.getBlockCacheHitCount()) + .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_HIT_COUNT, BLOCK_CACHE_PRIMARY_HIT_COUNT_DESC), + rsWrap.getBlockCachePrimaryHitCount()) .addCounter(Interns.info(BLOCK_CACHE_MISS_COUNT, BLOCK_COUNT_MISS_COUNT_DESC), rsWrap.getBlockCacheMissCount()) + .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_MISS_COUNT, BLOCK_COUNT_PRIMARY_MISS_COUNT_DESC), + rsWrap.getBlockCachePrimaryMissCount()) .addCounter(Interns.info(BLOCK_CACHE_EVICTION_COUNT, BLOCK_CACHE_EVICTION_COUNT_DESC), rsWrap.getBlockCacheEvictedCount()) + .addCounter(Interns.info(L1_BLOCK_CACHE_EVICTION_COUNT, L1_BLOCK_CACHE_EVICTION_COUNT_DESC), + rsWrap.getL1BlockCacheEvictedCount()) + .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_EVICTION_COUNT, BLOCK_CACHE_PRIMARY_EVICTION_COUNT_DESC), + rsWrap.getBlockCachePrimaryEvictedCount()) .addGauge(Interns.info(BLOCK_CACHE_HIT_PERCENT, BLOCK_CACHE_HIT_PERCENT_DESC), rsWrap.getBlockCacheHitPercent()) .addGauge(Interns.info(BLOCK_CACHE_EXPRESS_HIT_PERCENT, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java index f918bdd..78cbf19 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java @@ -30,6 +30,7 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable { private static final long serialVersionUID = -5199992013113130534L; private final String hfileName; private final long offset; + private final boolean isReplicaBlock; /** * Construct a new BlockCacheKey @@ -37,6 +38,11 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable { * @param offset Offset of the block into the file */ public BlockCacheKey(String hfileName, long offset) { + this(hfileName, offset, false); + } + + public BlockCacheKey(String hfileName, long offset, boolean isReplica) { + this.isReplicaBlock = isReplica; this.hfileName = hfileName; this.offset = offset; } @@ -63,7 +69,7 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable { return String.format("%s_%d", hfileName, offset); } - public static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + + public static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + Bytes.SIZEOF_BOOLEAN + ClassSize.REFERENCE + // this.hfileName Bytes.SIZEOF_LONG); // this.offset @@ -85,6 +91,10 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable { return hfileName; } + public boolean isPrimary() { + return !isReplicaBlock; + } + public long getOffset() { return offset; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java index 4467fee..1ce2abb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java @@ -43,6 +43,9 @@ public class CacheStats { /** The number of getBlock requests that were cache hits */ private final AtomicLong hitCount = new AtomicLong(0); + /** The number of getBlock requests that were cache hits from primary replica */ + private
final AtomicLong primaryHitCount = new AtomicLong(0); + /** * The number of getBlock requests that were cache hits, but only from * requests that were set to use the block cache. This is because all reads @@ -54,6 +57,8 @@ public class CacheStats { /** The number of getBlock requests that were cache misses */ private final AtomicLong missCount = new AtomicLong(0); + /** The number of getBlock requests for primary replica that were cache misses */ + private final AtomicLong primaryMissCount = new AtomicLong(0); /** * The number of getBlock requests that were cache misses, but only from * requests that were set to use the block cache. @@ -66,6 +71,9 @@ public class CacheStats { /** The total number of blocks that have been evicted */ private final AtomicLong evictedBlockCount = new AtomicLong(0); + /** The total number of blocks for primary replica that have been evicted */ + private final AtomicLong primaryEvictedBlockCount = new AtomicLong(0); + /** The number of metrics periods to include in window */ private final int numPeriodsInWindow; /** Hit counts for each period in window */ @@ -112,17 +120,25 @@ public class CacheStats { ", missCount=" + getMissCount() + ", missCachingCount=" + getMissCachingCount() + ", evictionCount=" + getEvictionCount() + ", evictedBlockCount=" + getEvictedCount() + + ", primaryMissCount=" + getMissCount(true) + + ", primaryHitCount=" + getHitCount(true) + ", evictedAgeMean=" + snapshot.getMean() + ", evictedAgeStdDev=" + snapshot.getStdDev(); } - public void miss(boolean caching) { + public void miss(boolean caching, boolean primary) { missCount.incrementAndGet(); + if (primary) primaryMissCount.incrementAndGet(); if (caching) missCachingCount.incrementAndGet(); } public void hit(boolean caching) { + hit(caching, true); + } + + public void hit(boolean caching, boolean primary) { hitCount.incrementAndGet(); + if (primary) primaryHitCount.incrementAndGet(); if (caching) hitCachingCount.incrementAndGet(); } @@ -130,9 +146,12 @@ public 
class CacheStats { evictionCount.incrementAndGet(); } - public void evicted(final long t) { + public void evicted(final long t, boolean primary) { if (t > this.startTime) this.ageAtEviction.update(t - this.startTime); this.evictedBlockCount.incrementAndGet(); + if (primary) { + primaryEvictedBlockCount.incrementAndGet(); + } } public long getRequestCount() { @@ -144,6 +163,12 @@ public class CacheStats { } public long getMissCount() { + return getMissCount(false); + } + public long getMissCount(boolean primary) { + if (primary) { + return primaryMissCount.get(); + } return missCount.get(); } @@ -152,6 +177,13 @@ public class CacheStats { } public long getHitCount() { + return getHitCount(false); + } + + public long getHitCount(boolean primary) { + if (primary) { + return primaryHitCount.get(); + } return hitCount.get(); } @@ -164,6 +196,11 @@ public class CacheStats { } public long getEvictedCount() { + return getEvictedCount(false); + } + + public long getEvictedCount(boolean primary) { + if (primary) return primaryEvictedBlockCount.get(); return this.evictedBlockCount.get(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java index 33b0d98..385cc87 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java @@ -131,7 +131,7 @@ public class CombinedBlockCache implements ResizableBlockCache, HeapSize { return lruCache.getBlockCount() + l2Cache.getBlockCount(); } - private static class CombinedCacheStats extends CacheStats { + public static class CombinedCacheStats extends CacheStats { private final CacheStats lruCacheStats; private final CacheStats bucketCacheStats; @@ -159,6 +159,11 @@ public class CombinedBlockCache implements ResizableBlockCache, HeapSize { } @Override + public long getMissCount(boolean 
primary) { + return lruCacheStats.getMissCount(primary) + bucketCacheStats.getMissCount(primary); + } + + @Override public long getMissCachingCount() { return lruCacheStats.getMissCachingCount() + bucketCacheStats.getMissCachingCount(); @@ -170,6 +175,10 @@ public class CombinedBlockCache implements ResizableBlockCache, HeapSize { } @Override + public long getHitCount(boolean primary) { + return lruCacheStats.getHitCount(primary) + bucketCacheStats.getHitCount(primary); + } + @Override public long getHitCachingCount() { return lruCacheStats.getHitCachingCount() + bucketCacheStats.getHitCachingCount(); @@ -181,6 +190,10 @@ public class CombinedBlockCache implements ResizableBlockCache, HeapSize { + bucketCacheStats.getEvictionCount(); } + public CacheStats getL1CacheStats() { + return lruCacheStats; + } + @Override public long getEvictedCount() { return lruCacheStats.getEvictedCount() @@ -188,6 +201,12 @@ public class CombinedBlockCache implements ResizableBlockCache, HeapSize { } @Override + public long getEvictedCount(boolean primary) { + return lruCacheStats.getEvictedCount(primary) + + bucketCacheStats.getEvictedCount(primary); + } + + @Override public double getHitRatioPastNPeriods() { double ratio = ((double) (lruCacheStats.getSumHitCountsPastNPeriods() + bucketCacheStats .getSumHitCountsPastNPeriods()) / (double) (lruCacheStats diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 7dbad6c..aca56db 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -461,6 +461,10 @@ public class HFile { * Return the file context of the HFile this reader belongs to */ HFileContext getFileContext(); + + boolean isReplicaReader(); + + void setReplicaReader(boolean isReplicaReader); boolean shouldIncludeMemstoreTS(); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java index 18b9b64..b340e9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java @@ -87,6 +87,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { /** Filled when we read in the trailer. */ private final Compression.Algorithm compressAlgo; + private boolean isReplicaReader; + /** * What kind of data block encoding should be used while reading, writing, * and handling cache. @@ -439,6 +441,16 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { } @Override + public boolean isReplicaReader() { + return isReplicaReader; + } + + @Override + public void setReplicaReader(boolean isReplicaReader) { + this.isReplicaReader = isReplicaReader; + } + + @Override public FileInfo loadFileInfo() throws IOException { return fileInfo; } @@ -1374,7 +1386,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { .getRootBlockKey(block)) { // Check cache for block. If found return. 
long metaBlockOffset = metaBlockIndexReader.getRootBlockOffset(block); - BlockCacheKey cacheKey = new BlockCacheKey(name, metaBlockOffset); + BlockCacheKey cacheKey = new BlockCacheKey(name, metaBlockOffset, this.isReplicaReader()); cacheBlock &= cacheConf.shouldCacheDataOnRead(); if (cacheConf.isBlockCacheEnabled()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index dfbdc05..04983f6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -417,7 +417,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { boolean updateCacheMetrics) { LruCachedBlock cb = map.get(cacheKey); if (cb == null) { - if (!repeat && updateCacheMetrics) stats.miss(caching); + if (!repeat && updateCacheMetrics) stats.miss(caching, cacheKey.isPrimary()); // If there is another block cache then try and read there. // However if this is a retry ( second time in double checked locking ) // And it's already a miss then the l2 will also be a miss. 
@@ -432,7 +432,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { } return null; } - if (updateCacheMetrics) stats.hit(caching); + if (updateCacheMetrics) stats.hit(caching, cacheKey.isPrimary()); cb.access(count.incrementAndGet()); return cb.getBuffer(); } @@ -495,7 +495,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { long size = map.size(); assertCounterSanity(size, val); } - stats.evicted(block.getCachedTime()); + stats.evicted(block.getCachedTime(), block.getCacheKey().isPrimary()); if (evictedByEvictionProcess && victimHandler != null) { if (victimHandler instanceof BucketCache) { boolean wait = getCurrentSize() < acceptableSize(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java index 1eb7bfd..f820193 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java @@ -149,9 +149,9 @@ public class MemcachedBlockCache implements BlockCache { // Update stats if this request doesn't have it turned off 100% of the time if (updateCacheMetrics) { if (result == null) { - cacheStats.miss(caching); + cacheStats.miss(caching, cacheKey.isPrimary()); } else { - cacheStats.hit(caching); + cacheStats.hit(caching, cacheKey.isPrimary()); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java index 0595d04..5eb6f8f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java @@ -404,7 +404,7 @@ public class BucketCache implements BlockCache, HeapSize { RAMQueueEntry re = ramCache.get(key); if (re != null) { if 
(updateCacheMetrics) { - cacheStats.hit(caching); + cacheStats.hit(caching, key.isPrimary()); } re.access(accessCount.incrementAndGet()); return re.getData(); @@ -426,7 +426,7 @@ public class BucketCache implements BlockCache, HeapSize { bucketEntry.deserializerReference(this.deserialiserMap)); long timeTaken = System.nanoTime() - start; if (updateCacheMetrics) { - cacheStats.hit(caching); + cacheStats.hit(caching, key.isPrimary()); cacheStats.ioHit(timeTaken); } if (cachedBlock.getMemoryType() == MemoryType.SHARED) { @@ -448,7 +448,7 @@ public class BucketCache implements BlockCache, HeapSize { } } if (!repeat && updateCacheMetrics) { - cacheStats.miss(caching); + cacheStats.miss(caching, key.isPrimary()); } return null; } @@ -478,7 +478,7 @@ public class BucketCache implements BlockCache, HeapSize { BucketEntry bucketEntry = backingMap.get(cacheKey); if (bucketEntry == null) { if (removedBlock != null) { - cacheStats.evicted(0); + cacheStats.evicted(0, cacheKey.isPrimary()); return true; } else { return false; @@ -500,7 +500,7 @@ public class BucketCache implements BlockCache, HeapSize { offsetLock.releaseLockEntry(lockEntry); } } - cacheStats.evicted(bucketEntry.getCachedTime()); + cacheStats.evicted(bucketEntry.getCachedTime(), cacheKey.isPrimary()); return true; } @@ -521,7 +521,7 @@ public class BucketCache implements BlockCache, HeapSize { BucketEntry bucketEntry = backingMap.get(cacheKey); if (bucketEntry == null) { if (removedBlock != null) { - cacheStats.evicted(0); + cacheStats.evicted(0, cacheKey.isPrimary()); return true; } else { return false; @@ -561,7 +561,7 @@ public class BucketCache implements BlockCache, HeapSize { offsetLock.releaseLockEntry(lockEntry); } } - cacheStats.evicted(bucketEntry.getCachedTime()); + cacheStats.evicted(bucketEntry.getCachedTime(), cacheKey.isPrimary()); return true; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 840085d..834f03e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -667,7 +667,8 @@ public class HStore implements Store { info.setRegionCoprocessorHost(this.region.getCoprocessorHost()); StoreFile storeFile = new StoreFile(this.getFileSystem(), info, this.conf, this.cacheConf, this.family.getBloomFilterType()); - storeFile.createReader(); + StoreFile.Reader r = storeFile.createReader(); + r.setReplicaStoreFile(isReplicaStore()); return storeFile; } @@ -1134,7 +1135,7 @@ public class HStore implements Store { // but now we get them in ascending order, which I think is // actually more correct, since memstore get put at the end. List sfScanners = StoreFileScanner.getScannersForStoreFiles(storeFilesToScan, - cacheBlocks, usePread, isCompaction, false, matcher, readPt); + cacheBlocks, usePread, isCompaction, false, matcher, readPt, isReplicaStore()); List scanners = new ArrayList(sfScanners.size()+1); scanners.addAll(sfScanners); @@ -2280,4 +2281,9 @@ public class HStore implements Store { public double getCompactionPressure() { return storeEngine.getStoreFileManager().getCompactionPressure(); } + + @Override + public boolean isReplicaStore() { + return getRegionInfo().getReplicaId() != HRegionInfo.DEFAULT_REPLICA_ID; + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java index 040c9df..5c6cc90 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.ServerName; import 
org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheStats; +import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.CombinedCacheStats; import org.apache.hadoop.hbase.wal.BoundedRegionGroupingProvider; import org.apache.hadoop.hbase.mob.MobCacheConfig; import org.apache.hadoop.hbase.mob.MobFileCache; @@ -287,6 +288,14 @@ class MetricsRegionServerWrapperImpl } @Override + public long getBlockCachePrimaryHitCount() { + if (this.cacheStats == null) { + return 0; + } + return this.cacheStats.getHitCount(true); + } + + @Override public long getBlockCacheMissCount() { if (this.cacheStats == null) { return 0; @@ -295,6 +304,14 @@ class MetricsRegionServerWrapperImpl } @Override + public long getBlockCachePrimaryMissCount() { + if (this.cacheStats == null) { + return 0; + } + return this.cacheStats.getMissCount(true); + } + + @Override public long getBlockCacheEvictedCount() { if (this.cacheStats == null) { return 0; @@ -303,6 +320,26 @@ class MetricsRegionServerWrapperImpl } @Override + public long getBlockCachePrimaryEvictedCount() { + if (this.cacheStats == null) { + return 0; + } + return this.cacheStats.getEvictedCount(true); + } + + @Override + public long getL1BlockCacheEvictedCount() { + if (this.cacheStats == null) { + return 0; + } + if (!(this.cacheStats instanceof CombinedCacheStats)) { + return this.cacheStats.getEvictedCount(); + } + CacheStats l1Stats = ((CombinedCacheStats)this.cacheStats).getL1CacheStats(); + return l1Stats.getEvictedCount(); + } + + @Override public double getBlockCacheHitPercent() { if (this.cacheStats == null) { return 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index 2431044..b35cffd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -105,7 +105,7 @@ public interface Store extends HeapSize, StoreConfigInformation, PropagatingConf byte[] stopRow, long readPt ) throws IOException; - + ScanInfo getScanInfo(); /** @@ -450,4 +450,5 @@ public interface Store extends HeapSize, StoreConfigInformation, PropagatingConf void refreshStoreFiles(Collection newFiles) throws IOException; void bulkLoadHFile(StoreFileInfo fileInfo) throws IOException; + boolean isReplicaStore(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index da1b084..bb0608c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -1118,6 +1118,13 @@ public class StoreFile { bloomFilterType = BloomType.NONE; } + public void setReplicaStoreFile(boolean isReplicaStoreFile) { + reader.setReplicaReader(isReplicaStoreFile); + } + public boolean isReplicaReader() { + return reader.isReplicaReader(); + } + /** * ONLY USE DEFAULT CONSTRUCTOR FOR UNIT TESTS */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index 9c04838..47c5015 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -78,6 +78,10 @@ public class StoreFileScanner implements KeyValueScanner { this.hasMVCCInfo = hasMVCC; } + boolean getIsReplica() { + return reader.isReplicaReader(); + } + /** * Return an array of scanners corresponding to the given * set of store files. 
@@ -108,11 +112,12 @@ public class StoreFileScanner implements KeyValueScanner { public static List getScannersForStoreFiles( Collection files, boolean cacheBlocks, boolean usePread, boolean isCompaction, boolean canUseDrop, - ScanQueryMatcher matcher, long readPt) throws IOException { + ScanQueryMatcher matcher, long readPt, boolean isReplica) throws IOException { List scanners = new ArrayList( files.size()); for (StoreFile file : files) { StoreFile.Reader r = file.createReader(canUseDrop); + r.setReplicaStoreFile(isReplica); StoreFileScanner scanner = r.getStoreFileScanner(cacheBlocks, usePread, isCompaction, readPt); scanner.setScanQueryMatcher(matcher); @@ -121,6 +126,14 @@ public class StoreFileScanner implements KeyValueScanner { return scanners; } + public static List getScannersForStoreFiles( + Collection files, boolean cacheBlocks, boolean usePread, + boolean isCompaction, boolean canUseDrop, + ScanQueryMatcher matcher, long readPt) throws IOException { + return getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, canUseDrop, + matcher, readPt, false); + } + public String toString() { return "StoreFileScanner[" + hfs.toString() + ", cur=" + cur + "]"; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java index cff88d5..9a548f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java @@ -619,18 +619,18 @@ public class TestLruBlockCache { // should be (2/4)=0.5 and (1/1)=1 stats.hit(false); stats.hit(true); - stats.miss(false); - stats.miss(false); + stats.miss(false, false); + stats.miss(false, false); stats.rollMetricsPeriod(); assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta); assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta); // period 2, 1 miss caching, 3 miss 
non-caching // should be (2/8)=0.25 and (1/2)=0.5 - stats.miss(true); - stats.miss(false); - stats.miss(false); - stats.miss(false); + stats.miss(true, false); + stats.miss(false, false); + stats.miss(false, false); + stats.miss(false, false); stats.rollMetricsPeriod(); assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta); assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta); @@ -647,16 +647,16 @@ public class TestLruBlockCache { // period 4, evict period 1, two caching misses // should be (4/10)=0.4 and (2/5)=0.4 - stats.miss(true); - stats.miss(true); + stats.miss(true, false); + stats.miss(true, false); stats.rollMetricsPeriod(); assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta); assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta); // period 5, evict period 2, 2 caching misses, 2 non-caching hit // should be (6/10)=0.6 and (2/6)=1/3 - stats.miss(true); - stats.miss(true); + stats.miss(true, false); + stats.miss(true, false); stats.hit(false); stats.hit(false); stats.rollMetricsPeriod(); @@ -683,8 +683,8 @@ public class TestLruBlockCache { // period 9, one of each // should be (2/4)=0.5 and (1/2)=0.5 - stats.miss(true); - stats.miss(false); + stats.miss(true, false); + stats.miss(false, false); stats.hit(true); stats.hit(false); stats.rollMetricsPeriod(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperStub.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperStub.java index bc5f494..372e37c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperStub.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperStub.java @@ -176,16 +176,36 @@ public class MetricsRegionServerWrapperStub implements MetricsRegionServerWrappe } @Override + public long getBlockCachePrimaryHitCount() { + return 422; + } + + @Override public long 
getBlockCacheMissCount() { return 417; } @Override + public long getBlockCachePrimaryMissCount() { + return 421; + } + + @Override public long getBlockCacheEvictedCount() { return 418; } @Override + public long getBlockCachePrimaryEvictedCount() { + return 420; + } + + @Override + public long getL1BlockCacheEvictedCount() { + return 420L; + } + + @Override public double getBlockCacheHitPercent() { return 98; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 5a92846..f4d9dac 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -524,13 +524,13 @@ public class TestHeapMemoryManager { @Override public boolean evictBlock(BlockCacheKey cacheKey) { - stats.evicted(0); + stats.evicted(0, cacheKey != null ? cacheKey.isPrimary() : true); return false; } @Override public int evictBlocksByHfileName(String hfileName) { - stats.evicted(0); // Just assuming only one block for file here. + stats.evicted(0, true); // Just assuming only one block for file here. return 0; }