diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FirstLevelBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FirstLevelBlockCache.java index a0c34c9fe3ef..937b12a22142 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FirstLevelBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FirstLevelBlockCache.java @@ -17,14 +17,34 @@ */ package org.apache.hadoop.hbase.io.hfile; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.yetus.audience.InterfaceAudience; /** * In-memory BlockCache that may be backed by secondary layer(s). */ @InterfaceAudience.Private -public interface FirstLevelBlockCache extends ResizableBlockCache, HeapSize { +public abstract class FirstLevelBlockCache implements ResizableBlockCache, HeapSize { + + /* Statistics thread */ + protected static final String STAT_THREAD_ENABLE_KEY = "hbase.lru.stat.enable"; + protected static final boolean STAT_THREAD_ENABLE_DEFAULT = false; + protected static final int STAT_THREAD_PERIOD = 60 * 5; + + protected transient ScheduledExecutorService statsThreadPool; + + FirstLevelBlockCache(boolean statEnabled) { + if (statEnabled) { + this.statsThreadPool = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder() + .setNameFormat("LruBlockCacheStatsExecutor").setDaemon(true).build()); + this.statsThreadPool.scheduleAtFixedRate(new StatisticsThread(this), + STAT_THREAD_PERIOD, STAT_THREAD_PERIOD, TimeUnit.SECONDS); + } + } /** * Whether the cache contains the block with specified cacheKey @@ -32,7 +52,7 @@ public interface FirstLevelBlockCache extends ResizableBlockCache, HeapSize { * @param cacheKey cache key for the block * @return true if it contains the block */ - boolean 
containsBlock(BlockCacheKey cacheKey); + public abstract boolean containsBlock(BlockCacheKey cacheKey); /** * Specifies the secondary cache. An entry that is evicted from this cache due to a size @@ -41,5 +61,42 @@ public interface FirstLevelBlockCache extends ResizableBlockCache, HeapSize { * @param victimCache the second level cache * @throws IllegalArgumentException if the victim cache had already been set */ - void setVictimCache(BlockCache victimCache); + public abstract void setVictimCache(BlockCache victimCache); + + public void shutdown() { + if (statsThreadPool != null) { + this.statsThreadPool.shutdown(); + for (int i = 0; i < 10; i++) { + if (!this.statsThreadPool.isTerminated()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + break; + } + } + } + } + } + + /* + * Statistics thread. Periodically prints the cache statistics to the log. + */ + static class StatisticsThread extends Thread { + + private final FirstLevelBlockCache l1; + + public StatisticsThread(FirstLevelBlockCache l1) { + super("LruBlockCacheStats"); + setDaemon(true); + this.l1 = l1; + } + + @Override + public void run() { + l1.logStats(); + } + } + + protected abstract void logStats(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruAdaptiveBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruAdaptiveBlockCache.java index 494a588aadb8..7fddd787f42c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruAdaptiveBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruAdaptiveBlockCache.java @@ -22,15 +22,9 @@ import java.lang.ref.WeakReference; import java.util.EnumMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.PriorityQueue; -import java.util.SortedSet; -import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Executors; -import 
java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; @@ -46,7 +40,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects; import org.apache.hbase.thirdparty.com.google.common.base.Objects; -import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * This realisation improve performance of classical LRU @@ -146,7 +139,7 @@ * Find more information about improvement: https://issues.apache.org/jira/browse/HBASE-23887 */ @InterfaceAudience.Private -public class LruAdaptiveBlockCache implements FirstLevelBlockCache { +public class LruAdaptiveBlockCache extends FirstLevelBlockCache { private static final Logger LOG = LoggerFactory.getLogger(LruAdaptiveBlockCache.class); @@ -243,11 +236,6 @@ public class LruAdaptiveBlockCache implements FirstLevelBlockCache { /** Eviction thread */ private transient final EvictionThread evictionThread; - /** Statistics thread schedule pool (for heavy debugging, could remove) */ - private transient final ScheduledExecutorService scheduleThreadPool = - Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder() - .setNameFormat("LruAdaptiveBlockCacheStatsExecutor").setDaemon(true).build()); - /** Current size of cache */ private final AtomicLong size; @@ -345,7 +333,8 @@ public LruAdaptiveBlockCache(long maxSize, long blockSize, boolean evictionThrea DEFAULT_MAX_BLOCK_SIZE, DEFAULT_LRU_CACHE_HEAVY_EVICTION_COUNT_LIMIT, DEFAULT_LRU_CACHE_HEAVY_EVICTION_MB_SIZE_LIMIT, - DEFAULT_LRU_CACHE_HEAVY_EVICTION_OVERHEAD_COEFFICIENT); + DEFAULT_LRU_CACHE_HEAVY_EVICTION_OVERHEAD_COEFFICIENT, + STAT_THREAD_ENABLE_DEFAULT); } public LruAdaptiveBlockCache(long maxSize, long blockSize, @@ -368,7 +357,8 @@ public LruAdaptiveBlockCache(long maxSize, long blockSize, conf.getLong(LRU_CACHE_HEAVY_EVICTION_MB_SIZE_LIMIT, 
DEFAULT_LRU_CACHE_HEAVY_EVICTION_MB_SIZE_LIMIT), conf.getFloat(LRU_CACHE_HEAVY_EVICTION_OVERHEAD_COEFFICIENT, - DEFAULT_LRU_CACHE_HEAVY_EVICTION_OVERHEAD_COEFFICIENT)); + DEFAULT_LRU_CACHE_HEAVY_EVICTION_OVERHEAD_COEFFICIENT), + conf.getBoolean(STAT_THREAD_ENABLE_KEY, STAT_THREAD_ENABLE_DEFAULT)); } public LruAdaptiveBlockCache(long maxSize, long blockSize, Configuration conf) { @@ -397,12 +387,13 @@ public LruAdaptiveBlockCache(long maxSize, long blockSize, Configuration conf) { * @param heavyEvictionOverheadCoefficient how aggressive AdaptiveLRU will reduce GC */ public LruAdaptiveBlockCache(long maxSize, long blockSize, boolean evictionThread, - int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel, - float minFactor, float acceptableFactor, float singleFactor, - float multiFactor, float memoryFactor, float hardLimitFactor, - boolean forceInMemory, long maxBlockSize, - int heavyEvictionCountLimit, long heavyEvictionMbSizeLimit, - float heavyEvictionOverheadCoefficient) { + int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel, + float minFactor, float acceptableFactor, float singleFactor, + float multiFactor, float memoryFactor, float hardLimitFactor, + boolean forceInMemory, long maxBlockSize, + int heavyEvictionCountLimit, long heavyEvictionMbSizeLimit, + float heavyEvictionOverheadCoefficient, boolean statEnabled) { + super(statEnabled); this.maxBlockSize = maxBlockSize; if(singleFactor + multiFactor + memoryFactor != 1 || singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) { @@ -446,11 +437,6 @@ public LruAdaptiveBlockCache(long maxSize, long blockSize, boolean evictionThrea heavyEvictionOverheadCoefficient = Math.min(heavyEvictionOverheadCoefficient, 1.0f); heavyEvictionOverheadCoefficient = Math.max(heavyEvictionOverheadCoefficient, 0.001f); this.heavyEvictionOverheadCoefficient = heavyEvictionOverheadCoefficient; - - // TODO: Add means of turning this off. Bit obnoxious running thread just to make a log - // every five minutes. 
- this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this), STAT_THREAD_PERIOD, - STAT_THREAD_PERIOD, TimeUnit.SECONDS); } @Override @@ -1198,26 +1184,8 @@ boolean isEnteringRun() { } } - /* - * Statistics thread. Periodically prints the cache statistics to the log. - */ - static class StatisticsThread extends Thread { - - private final LruAdaptiveBlockCache lru; - - public StatisticsThread(LruAdaptiveBlockCache lru) { - super("LruAdaptiveBlockCacheStats"); - setDaemon(true); - this.lru = lru; - } - - @Override - public void run() { - lru.logStats(); - } - } - - public void logStats() { + @Override + protected void logStats() { // Log size long totalSize = heapSize(); long freeSize = maxSize - totalSize; @@ -1375,26 +1343,10 @@ private long memorySize() { @Override public void shutdown() { + super.shutdown(); if (victimHandler != null) { victimHandler.shutdown(); } - this.scheduleThreadPool.shutdown(); - for (int i = 0; i < 10; i++) { - if (!this.scheduleThreadPool.isShutdown()) { - try { - Thread.sleep(10); - } catch (InterruptedException e) { - LOG.warn("Interrupted while sleeping"); - Thread.currentThread().interrupt(); - break; - } - } - } - - if (!this.scheduleThreadPool.isShutdown()) { - List runnables = this.scheduleThreadPool.shutdownNow(); - LOG.debug("Still running " + runnables); - } this.evictionThread.shutdown(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index 3e5ba1d19c56..4f18396f4c0b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -22,15 +22,11 @@ import java.lang.ref.WeakReference; import java.util.EnumMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.PriorityQueue; import java.util.SortedSet; import java.util.TreeSet; import 
java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; @@ -45,7 +41,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects; import org.apache.hbase.thirdparty.com.google.common.base.Objects; -import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * A block cache implementation that is memory-aware using {@link HeapSize}, memory-bound using an @@ -85,7 +80,7 @@ * sizes and usage. */ @InterfaceAudience.Private -public class LruBlockCache implements FirstLevelBlockCache { +public class LruBlockCache extends FirstLevelBlockCache { private static final Logger LOG = LoggerFactory.getLogger(LruBlockCache.class); @@ -140,8 +135,6 @@ public class LruBlockCache implements FirstLevelBlockCache { private static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false; - /* Statistics thread */ - private static final int STAT_THREAD_PERIOD = 60 * 5; private static final String LRU_MAX_BLOCK_SIZE = "hbase.lru.max.block.size"; private static final long DEFAULT_MAX_BLOCK_SIZE = 16L * 1024L * 1024L; @@ -165,11 +158,6 @@ public class LruBlockCache implements FirstLevelBlockCache { /** Eviction thread */ private transient final EvictionThread evictionThread; - /** Statistics thread schedule pool (for heavy debugging, could remove) */ - private transient final ScheduledExecutorService scheduleThreadPool = - Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder() - .setNameFormat("LruBlockCacheStatsExecutor").setDaemon(true).build()); - /** Current size of cache */ private final AtomicLong size; @@ -252,7 +240,8 @@ public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) { DEFAULT_MEMORY_FACTOR, DEFAULT_HARD_CAPACITY_LIMIT_FACTOR, false, - 
DEFAULT_MAX_BLOCK_SIZE); + DEFAULT_MAX_BLOCK_SIZE, + true); } public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) { @@ -268,7 +257,8 @@ public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Confi conf.getFloat(LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, DEFAULT_HARD_CAPACITY_LIMIT_FACTOR), conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE), - conf.getLong(LRU_MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE)); + conf.getLong(LRU_MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE), + conf.getBoolean(STAT_THREAD_ENABLE_KEY, STAT_THREAD_ENABLE_DEFAULT)); } public LruBlockCache(long maxSize, long blockSize, Configuration conf) { @@ -294,7 +284,8 @@ public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel, float minFactor, float acceptableFactor, float singleFactor, float multiFactor, float memoryFactor, float hardLimitFactor, - boolean forceInMemory, long maxBlockSize) { + boolean forceInMemory, long maxBlockSize, boolean statEnable) { + super(statEnable); this.maxBlockSize = maxBlockSize; if(singleFactor + multiFactor + memoryFactor != 1 || singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) { @@ -330,10 +321,6 @@ public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, } else { this.evictionThread = null; } - // TODO: Add means of turning this off. Bit obnoxious running thread just to make a log - // every five minutes. - this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this), STAT_THREAD_PERIOD, - STAT_THREAD_PERIOD, TimeUnit.SECONDS); } @Override @@ -974,26 +961,8 @@ boolean isEnteringRun() { } } - /* - * Statistics thread. Periodically prints the cache statistics to the log. 
- */ - static class StatisticsThread extends Thread { - - private final LruBlockCache lru; - - public StatisticsThread(LruBlockCache lru) { - super("LruBlockCacheStats"); - setDaemon(true); - this.lru = lru; - } - - @Override - public void run() { - lru.logStats(); - } - } - - public void logStats() { + @Override + protected void logStats() { // Log size long totalSize = heapSize(); long freeSize = maxSize - totalSize; @@ -1151,26 +1120,10 @@ private long memorySize() { @Override public void shutdown() { + super.shutdown(); if (victimHandler != null) { victimHandler.shutdown(); } - this.scheduleThreadPool.shutdown(); - for (int i = 0; i < 10; i++) { - if (!this.scheduleThreadPool.isShutdown()) { - try { - Thread.sleep(10); - } catch (InterruptedException e) { - LOG.warn("Interrupted while sleeping"); - Thread.currentThread().interrupt(); - break; - } - } - } - - if (!this.scheduleThreadPool.isShutdown()) { - List runnables = this.scheduleThreadPool.shutdownNow(); - LOG.debug("Still running " + runnables); - } this.evictionThread.shutdown(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/TinyLfuBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/TinyLfuBlockCache.java index e5e2e8fb6320..23d5a094ae07 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/TinyLfuBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/TinyLfuBlockCache.java @@ -22,9 +22,6 @@ import java.util.Comparator; import java.util.Iterator; import java.util.concurrent.Executor; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; @@ -37,7 +34,6 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; import org.apache.hadoop.util.StringUtils; import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects; 
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; @@ -54,15 +50,13 @@ * */ @InterfaceAudience.Private -public final class TinyLfuBlockCache implements FirstLevelBlockCache { +public final class TinyLfuBlockCache extends FirstLevelBlockCache { private static final Logger LOG = LoggerFactory.getLogger(TinyLfuBlockCache.class); private static final String MAX_BLOCK_SIZE = "hbase.tinylfu.max.block.size"; private static final long DEFAULT_MAX_BLOCK_SIZE = 16L * 1024L * 1024L; - private static final int STAT_THREAD_PERIOD_SECONDS = 5 * 60; private transient final Eviction policy; - private transient final ScheduledExecutorService statsThreadPool; private final long maxBlockSize; private final CacheStats stats; @@ -81,7 +75,8 @@ public final class TinyLfuBlockCache implements FirstLevelBlockCache { public TinyLfuBlockCache(long maximumSizeInBytes, long avgBlockSize, Executor executor, Configuration conf) { this(maximumSizeInBytes, avgBlockSize, - conf.getLong(MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE), executor); + conf.getLong(MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE), executor, + conf.getBoolean(STAT_THREAD_ENABLE_KEY, STAT_THREAD_ENABLE_DEFAULT)); } /** @@ -92,8 +87,9 @@ public TinyLfuBlockCache(long maximumSizeInBytes, long avgBlockSize, * @param maxBlockSize maximum size of a block, in bytes * @param executor the cache's executor */ - public TinyLfuBlockCache(long maximumSizeInBytes, - long avgBlockSize, long maxBlockSize, Executor executor) { + public TinyLfuBlockCache(long maximumSizeInBytes, long avgBlockSize, long maxBlockSize, + Executor executor, boolean statEnable) { + super(statEnable); this.cache = Caffeine.newBuilder() .executor(executor) .maximumWeight(maximumSizeInBytes) @@ -105,11 +101,6 @@ public TinyLfuBlockCache(long maximumSizeInBytes, this.maxBlockSize = maxBlockSize; this.policy = cache.policy().eviction().get(); this.stats = new 
CacheStats(getClass().getSimpleName()); - - statsThreadPool = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryBuilder() - .setNameFormat("TinyLfuBlockCacheStatsExecutor").setDaemon(true).build()); - statsThreadPool.scheduleAtFixedRate(this::logStats, - STAT_THREAD_PERIOD_SECONDS, STAT_THREAD_PERIOD_SECONDS, TimeUnit.SECONDS); } @Override @@ -261,10 +252,10 @@ public CacheStats getStats() { @Override public void shutdown() { + super.shutdown(); if (victimCache != null) { victimCache.shutdown(); } - statsThreadPool.shutdown(); } @Override @@ -280,7 +271,8 @@ public Iterator iterator() { .iterator(); } - private void logStats() { + @Override + protected void logStats() { LOG.info( "totalSize=" + StringUtils.byteDesc(heapSize()) + ", " + "freeSize=" + StringUtils.byteDesc(getFreeSize()) + ", " + diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java index f29d12ac315b..680a72b037b8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java @@ -356,7 +356,8 @@ public void testCacheEvictionThreePriorities() throws Exception { 16 * 1024 * 1024, 10, 500, - 0.01f); + 0.01f, + false); CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); @@ -493,7 +494,8 @@ public void testCacheEvictionInMemoryForceMode() throws Exception { 16 * 1024 * 1024, 10, 500, - 0.01f); + 0.01f, + false); CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); @@ -614,7 +616,8 @@ public void testScanResistance() throws Exception { 16 * 1024 * 1024, 10, 500, - 0.01f); + 0.01f, + false); CachedItem [] singleBlocks = 
generateFixedBlocks(20, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); @@ -685,7 +688,8 @@ public void testMaxBlockSize() throws Exception { 1024, 10, 500, - 0.01f); + 0.01f, + false); CachedItem [] tooLong = generateFixedBlocks(10, 1024+5, "long"); CachedItem [] small = generateFixedBlocks(15, 600, "small"); @@ -729,7 +733,8 @@ public void testResizeBlockCache() throws Exception { 16 * 1024 * 1024, 10, 500, - 0.01f); + 0.01f, + false); CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); @@ -902,7 +907,8 @@ public void testCacheBlockNextBlockMetadataMissing() { 1024, 10, 500, - 0.01f); + 0.01f, + false); BlockCacheKey key = new BlockCacheKey("key1", 0); ByteBuffer actualBuffer = ByteBuffer.allocate(length); @@ -1096,7 +1102,8 @@ public void testMultiThreadGetAndEvictBlock() throws Exception { false, 1024, 10, 500, - 0.01f); + 0.01f, + false); testMultiThreadGetAndEvictBlockInternal(cache); } @@ -1121,7 +1128,8 @@ public void testSkipCacheDataBlocksInteral(int heavyEvictionCountLimit) throws E maxSize, heavyEvictionCountLimit, 200, - 0.01f); + 0.01f, + false); EvictionThread evictionThread = cache.getEvictionThread(); assertNotNull(evictionThread); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java index af70f3db7cc4..db7daac2bbcb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java @@ -342,7 +342,8 @@ public void testCacheEvictionThreePriorities() throws Exception { 0.34f, // memory 1.2f, // limit false, - 16 * 1024 * 1024); + 16 * 1024 * 1024, + true); CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single"); CachedItem [] multiBlocks = 
generateFixedBlocks(5, blockSize, "multi"); @@ -464,7 +465,8 @@ public void testCacheEvictionInMemoryForceMode() throws Exception { 0.5f, // memory 1.2f, // limit true, - 16 * 1024 * 1024); + 16 * 1024 * 1024, + true); CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); @@ -571,7 +573,8 @@ public void testScanResistance() throws Exception { 0.34f, // memory 1.2f, // limit false, - 16 * 1024 * 1024); + 16 * 1024 * 1024, + true); CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); @@ -635,7 +638,8 @@ public void testMaxBlockSize() throws Exception { 0.34f, // memory 1.2f, // limit false, - 1024); + 1024, + true); CachedItem [] tooLong = generateFixedBlocks(10, 1024+5, "long"); CachedItem [] small = generateFixedBlocks(15, 600, "small"); @@ -675,7 +679,8 @@ public void testResizeBlockCache() throws Exception { 0.34f, // memory 1.2f, // limit false, - 16 * 1024 * 1024); + 16 * 1024 * 1024, + true); CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi"); @@ -837,7 +842,8 @@ public void testCacheBlockNextBlockMetadataMissing() { 0.34f, // memory 1.2f, // limit false, - 1024); + 1024, + true); BlockCacheKey key = new BlockCacheKey("key1", 0); ByteBuffer actualBuffer = ByteBuffer.allocate(length); @@ -1026,7 +1032,8 @@ public void testMultiThreadGetAndEvictBlock() throws Exception { 0.33f, // multi 0.34f, // memory 1.2f, // limit - false, 1024); + false, 1024, + true); testMultiThreadGetAndEvictBlockInternal(cache); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java index bbe3182b4e4b..cf623a1a7c1b 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java @@ -51,7 +51,8 @@ public void testCacheSimple() throws Exception { long maxSize = 1000000; long blockSize = calculateBlockSizeDefault(maxSize, 101); - TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run); + TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, + Runnable::run, false); CachedItem [] blocks = generateRandomBlocks(100, blockSize); @@ -107,7 +108,8 @@ public void testCacheEvictionSimple() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSizeDefault(maxSize, 10); - TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run); + TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, + Runnable::run, false); CachedItem [] blocks = generateFixedBlocks(11, blockSize, "block"); @@ -132,7 +134,8 @@ public void testScanResistance() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSize(maxSize, 10); - TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run); + TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, + Runnable::run, false); CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single"); CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi"); @@ -172,7 +175,8 @@ public void testMaxBlockSize() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSize(maxSize, 10); - TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run); + TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, + Runnable::run, false); CachedItem [] tooLong = generateFixedBlocks(10, 2 * blockSize, "long"); CachedItem [] small = generateFixedBlocks(15, blockSize / 2, 
"small"); @@ -199,7 +203,8 @@ public void testResizeBlockCache() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSize(maxSize, 10); - TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, Runnable::run); + TinyLfuBlockCache cache = new TinyLfuBlockCache(maxSize, blockSize, blockSize, + Runnable::run, false); CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");