
Commit 65ad4f7

Merge branch 'master' into HBASE-26342
2 parents: dcd118d + 26ab9d0

15 files changed: +156 additions, -48 deletions

bin/hbase

Lines changed: 14 additions & 4 deletions
@@ -492,8 +492,16 @@ add_jdk11_deps_to_classpath() {
   done
 }

-enable_trace() {
-  agent_jar=$(find lib/trace -type f -name "opentelemetry-javaagent-*")
+add_opentelemetry_agent() {
+  if ! agent_jar=$(find lib/trace -type f -name "opentelemetry-javaagent-*" 2>/dev/null); then
+    # must be dev environment
+    f="${HBASE_HOME}/hbase-build-configuration/target/cached_classpath.txt"
+    if [ ! -f "${f}" ]; then
+      echo "As this is a development environment, we need ${f} to be generated from maven (command: mvn install -DskipTests)"
+      exit 1
+    fi
+    agent_jar=$(tr ':' '\n' < "${f}" | grep opentelemetry-javaagent)
+  fi
   HBASE_OPTS="$HBASE_OPTS -javaagent:$agent_jar $HBASE_TRACE_OPTS"
 }

@@ -801,8 +809,10 @@ elif [ "${DEBUG}" = "true" ]; then
 fi

 if [[ -n "${HBASE_TRACE_OPTS}" ]]; then
-  echo "Attach opentelemetry agent to enable trace"
-  enable_trace
+  if [ "${DEBUG}" = "true" ]; then
+    echo "Attaching opentelemetry agent"
+  fi
+  add_opentelemetry_agent
 fi

 # Have JVM dump heap if we run out of memory. Files will be 'launch directory'

conf/hbase-env.sh

Lines changed: 1 addition & 1 deletion
@@ -146,7 +146,7 @@
 # Uncomment to enable trace, you can change the options to use other exporters such as jaeger or
 # zipkin. See https://github.com/open-telemetry/opentelemetry-java-instrumentation on how to
 # configure exporters and other components through system properties.
-# export HBASE_TRACE_OPTS="-Dotel.resource.attributes=service.name=HBase -Dotel.traces.exporter=logging otel.metrics.exporter=none"
+# export HBASE_TRACE_OPTS="-Dotel.resource.attributes=service.name=HBase -Dotel.traces.exporter=logging -Dotel.metrics.exporter=none"

 # Additional argments passed to jshell invocation
 # export HBASE_JSHELL_ARGS="--startup DEFAULT --startup PRINTING --startup hbase_startup.jsh"

hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java

Lines changed: 21 additions & 11 deletions
@@ -370,7 +370,10 @@ public class HMaster extends HBaseServerBase<MasterRpcServices> implements Maste

   private HbckChore hbckChore;
   CatalogJanitor catalogJanitorChore;
-  private DirScanPool cleanerPool;
+  // Threadpool for scanning the archive directory, used by the HFileCleaner
+  private DirScanPool hfileCleanerPool;
+  // Threadpool for scanning the Old logs directory, used by the LogCleaner
+  private DirScanPool logCleanerPool;
   private LogCleaner logCleaner;
   private HFileCleaner hfileCleaner;
   private HFileCleaner[] customCleaners;
@@ -1137,7 +1140,8 @@ private void finishActiveMasterInitialization(MonitoredTask status) throws IOExc
       (EnvironmentEdgeManager.currentTime() - masterActiveTime) / 1000.0f));
     this.masterFinishedInitializationTime = EnvironmentEdgeManager.currentTime();
     configurationManager.registerObserver(this.balancer);
-    configurationManager.registerObserver(this.cleanerPool);
+    configurationManager.registerObserver(this.hfileCleanerPool);
+    configurationManager.registerObserver(this.logCleanerPool);
     configurationManager.registerObserver(this.hfileCleaner);
     configurationManager.registerObserver(this.logCleaner);
     configurationManager.registerObserver(this.regionsRecoveryConfigManager);
@@ -1507,15 +1511,16 @@ private void startServiceThreads() throws IOException {
       ExecutorType.MASTER_TABLE_OPERATIONS).setCorePoolSize(1));
     startProcedureExecutor();

-    // Create cleaner thread pool
-    cleanerPool = new DirScanPool(conf);
+    // Create log cleaner thread pool
+    logCleanerPool = DirScanPool.getLogCleanerScanPool(conf);
     Map<String, Object> params = new HashMap<>();
     params.put(MASTER, this);
     // Start log cleaner thread
     int cleanerInterval =
       conf.getInt(HBASE_MASTER_CLEANER_INTERVAL, DEFAULT_HBASE_MASTER_CLEANER_INTERVAL);
     this.logCleaner = new LogCleaner(cleanerInterval, this, conf,
-      getMasterWalManager().getFileSystem(), getMasterWalManager().getOldLogDir(), cleanerPool, params);
+      getMasterWalManager().getFileSystem(), getMasterWalManager().getOldLogDir(),
+      logCleanerPool, params);
     getChoreService().scheduleChore(logCleaner);

     Path archiveDir = HFileArchiveUtil.getArchivePath(conf);
@@ -1532,7 +1537,7 @@ private void startServiceThreads() throws IOException {
         cleanerClasses.toArray(new String[cleanerClasses.size()]));
       LOG.info("Custom cleaner paths: {}, plugins: {}", Arrays.asList(paths), cleanerClasses);
     }
-    customCleanerPool = new DirScanPool(conf.get(CleanerChore.CUSTOM_POOL_SIZE, "6"));
+    customCleanerPool = DirScanPool.getHFileCleanerScanPool(conf);
     customPaths = new Path[paths.length];
     customCleaners = new HFileCleaner[paths.length];
     for (int i = 0; i < paths.length; i++) {
@@ -1548,9 +1553,10 @@ conf, getMasterFileSystem().getFileSystem(), new Path(archiveDir, path),
       }
     }

-    // start the hfile archive cleaner thread
+    // Create archive cleaner thread pool
+    hfileCleanerPool = DirScanPool.getHFileCleanerScanPool(conf);
     this.hfileCleaner = new HFileCleaner(cleanerInterval, this, conf,
-      getMasterFileSystem().getFileSystem(), archiveDir, cleanerPool, params, customPaths);
+      getMasterFileSystem().getFileSystem(), archiveDir, hfileCleanerPool, params);
     getChoreService().scheduleChore(hfileCleaner);

     // Regions Reopen based on very high storeFileRefCount is considered enabled
@@ -1600,9 +1606,13 @@ protected void stopServiceThreads() {
     }
     stopChoreService();
     stopExecutorService();
-    if (cleanerPool != null) {
-      cleanerPool.shutdownNow();
-      cleanerPool = null;
+    if (hfileCleanerPool != null) {
+      hfileCleanerPool.shutdownNow();
+      hfileCleanerPool = null;
+    }
+    if (logCleanerPool != null) {
+      logCleanerPool.shutdownNow();
+      logCleanerPool = null;
     }
     if (customCleanerPool != null) {
       customCleanerPool.shutdownNow();
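
The master now tracks two DirScanPool fields and registers both with the ConfigurationManager, because each pool watches its own size property. A minimal sketch of that wiring, assuming a standalone ConfigurationManager outside the master (class and variable names here are chosen for illustration, not taken from HMaster):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.master.cleaner.CleanerChore;
import org.apache.hadoop.hbase.master.cleaner.DirScanPool;

public class DualCleanerPoolSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();

    // Two independent pools, each bound to its own size property.
    DirScanPool hfileCleanerPool = DirScanPool.getHFileCleanerScanPool(conf);
    DirScanPool logCleanerPool = DirScanPool.getLogCleanerScanPool(conf);

    // Registering both observers is what keeps each pool resizable at runtime.
    ConfigurationManager configurationManager = new ConfigurationManager();
    configurationManager.registerObserver(hfileCleanerPool);
    configurationManager.registerObserver(logCleanerPool);

    // Simulate an online configuration reload: only the log cleaner pool records
    // a new target size; the hfile cleaner pool is left untouched.
    conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, "4");
    configurationManager.notifyAllObservers(conf);
  }
}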

hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java

Lines changed: 7 additions & 2 deletions
@@ -58,15 +58,20 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
   private static final int AVAIL_PROCESSORS = Runtime.getRuntime().availableProcessors();

   /**
+   * Configures the threadpool used for scanning the archive directory for the HFileCleaner
    * If it is an integer and >= 1, it would be the size;
    * if 0.0 < size <= 1.0, size would be available processors * size.
    * Pay attention that 1.0 is different from 1, former indicates it will use 100% of cores,
    * while latter will use only 1 thread for chore to scan dir.
    */
   public static final String CHORE_POOL_SIZE = "hbase.cleaner.scan.dir.concurrent.size";
   static final String DEFAULT_CHORE_POOL_SIZE = "0.25";
-
-  public static final String CUSTOM_POOL_SIZE = "hbase.cleaner.customized.paths.pool.size";
+  /**
+   * Configures the threadpool used for scanning the Old logs directory for the LogCleaner
+   * Follows the same configuration mechanism as CHORE_POOL_SIZE, but has a default of 1 thread.
+   */
+  public static final String LOG_CLEANER_CHORE_SIZE = "hbase.log.cleaner.scan.dir.concurrent.size";
+  static final String DEFAULT_LOG_CLEANER_CHORE_POOL_SIZE = "1";

   private final DirScanPool pool;
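
The javadoc above encodes two sizing rules: a whole number >= 1 is taken literally, while a fraction in (0.0, 1.0] is scaled by the number of available cores, so "1.0" means all cores but "1" means a single scan thread. A rough sketch of those rules (illustrative only, not the actual CleanerChore.calculatePoolSize implementation):

public class PoolSizeRuleSketch {
  // Mirrors the documented rules; HBase's exact parsing and rounding may differ.
  static int calculate(String poolSize) {
    int cores = Runtime.getRuntime().availableProcessors();
    if (poolSize.matches("[1-9][0-9]*")) {
      return Integer.parseInt(poolSize);              // "1" -> exactly 1 scan thread
    }
    double fraction = Double.parseDouble(poolSize);
    if (fraction > 0.0 && fraction <= 1.0) {
      return Math.max(1, (int) (cores * fraction));   // "0.25" -> a quarter of the cores
    }
    return 0; // careless values like "0"; DirScanPool then falls back to the per-type default
  }

  public static void main(String[] args) {
    System.out.println(calculate("1"));    // default for the new log cleaner pool
    System.out.println(calculate("0.25")); // default for the hfile cleaner pool
    System.out.println(calculate("1.0"));  // all cores, unlike "1"
  }
}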

hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java

Lines changed: 39 additions & 11 deletions
@@ -40,24 +40,42 @@ public class DirScanPool implements ConfigurationObserver {
   private final ThreadPoolExecutor pool;
   private int cleanerLatch;
   private boolean reconfigNotification;
+  private Type dirScanPoolType;
+  private final String name;

-  public DirScanPool(Configuration conf) {
-    this(conf.get(CleanerChore.CHORE_POOL_SIZE, CleanerChore.DEFAULT_CHORE_POOL_SIZE));
+  private enum Type {
+    LOG_CLEANER(CleanerChore.LOG_CLEANER_CHORE_SIZE,
+      CleanerChore.DEFAULT_LOG_CLEANER_CHORE_POOL_SIZE),
+    HFILE_CLEANER(CleanerChore.CHORE_POOL_SIZE, CleanerChore.DEFAULT_CHORE_POOL_SIZE);
+
+    private final String cleanerPoolSizeConfigName;
+    private final String cleanerPoolSizeConfigDefault;
+
+    private Type(String cleanerPoolSizeConfigName, String cleanerPoolSizeConfigDefault) {
+      this.cleanerPoolSizeConfigName = cleanerPoolSizeConfigName;
+      this.cleanerPoolSizeConfigDefault = cleanerPoolSizeConfigDefault;
+    }
   }

-  public DirScanPool(String poolSize) {
+  private DirScanPool(Configuration conf, Type dirScanPoolType) {
+    this.dirScanPoolType = dirScanPoolType;
+    this.name = dirScanPoolType.name().toLowerCase();
+    String poolSize = conf.get(dirScanPoolType.cleanerPoolSizeConfigName,
+      dirScanPoolType.cleanerPoolSizeConfigDefault);
+
     size = CleanerChore.calculatePoolSize(poolSize);
     // poolSize may be 0 or 0.0 from a careless configuration,
     // double check to make sure.
-    size = size == 0 ? CleanerChore.calculatePoolSize(CleanerChore.DEFAULT_CHORE_POOL_SIZE) : size;
-    pool = initializePool(size);
-    LOG.info("Cleaner pool size is {}", size);
+    size = size == 0 ?
+      CleanerChore.calculatePoolSize(dirScanPoolType.cleanerPoolSizeConfigDefault) : size;
+    pool = initializePool(size, name);
+    LOG.info("{} Cleaner pool size is {}", name, size);
     cleanerLatch = 0;
   }

-  private static ThreadPoolExecutor initializePool(int size) {
+  private static ThreadPoolExecutor initializePool(int size, String name) {
     return Threads.getBoundedCachedThreadPool(size, 1, TimeUnit.MINUTES,
-      new ThreadFactoryBuilder().setNameFormat("dir-scan-pool-%d").setDaemon(true)
+      new ThreadFactoryBuilder().setNameFormat(name + "-dir-scan-pool-%d").setDaemon(true)
        .setUncaughtExceptionHandler(Threads.LOGGING_EXCEPTION_HANDLER).build());
   }

@@ -68,9 +86,11 @@ private static ThreadPoolExecutor initializePool(int size) {
   @Override
   public synchronized void onConfigurationChange(Configuration conf) {
     int newSize = CleanerChore.calculatePoolSize(
-      conf.get(CleanerChore.CHORE_POOL_SIZE, CleanerChore.DEFAULT_CHORE_POOL_SIZE));
+      conf.get(dirScanPoolType.cleanerPoolSizeConfigName,
+        dirScanPoolType.cleanerPoolSizeConfigDefault));
     if (newSize == size) {
-      LOG.trace("Size from configuration is same as previous={}, no need to update.", newSize);
+      LOG.trace("{} Cleaner Size from configuration is same as previous={}, no need to update.",
+        name, newSize);
       return;
     }
     size = newSize;
@@ -113,11 +133,19 @@ synchronized void tryUpdatePoolSize(long timeout) {
         break;
       }
     }
-    LOG.info("Update chore's pool size from {} to {}", pool.getPoolSize(), size);
+    LOG.info("Update {} chore's pool size from {} to {}", name, pool.getPoolSize(), size);
     pool.setCorePoolSize(size);
   }

   public int getSize() {
     return size;
   }
+
+  public static DirScanPool getHFileCleanerScanPool(Configuration conf) {
+    return new DirScanPool(conf, Type.HFILE_CLEANER);
+  }
+
+  public static DirScanPool getLogCleanerScanPool(Configuration conf) {
+    return new DirScanPool(conf, Type.LOG_CLEANER);
+  }
 }
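
With the public constructors replaced by the two factory methods, callers choose the pool type explicitly, each type reads its own size property, and worker threads carry the type in their name, which makes the two pools distinguishable in thread dumps. A small usage sketch (the property values below are arbitrary examples):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.master.cleaner.CleanerChore;
import org.apache.hadoop.hbase.master.cleaner.DirScanPool;

public class DirScanPoolUsageSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(CleanerChore.CHORE_POOL_SIZE, "0.5");      // hfile cleaner pool: half the cores
    conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, "2"); // log cleaner pool: 2 threads

    DirScanPool hfilePool = DirScanPool.getHFileCleanerScanPool(conf);
    DirScanPool logPool = DirScanPool.getLogCleanerScanPool(conf);

    // Threads are now named per type, e.g. "hfile_cleaner-dir-scan-pool-0" and
    // "log_cleaner-dir-scan-pool-0", instead of the old shared "dir-scan-pool-%d".
    System.out.println("hfile cleaner scan pool size: " + hfilePool.getSize());
    System.out.println("log cleaner scan pool size: " + logPool.getSize());
  }
}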

hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java

Lines changed: 1 addition & 1 deletion
@@ -111,7 +111,7 @@ public static void setupCluster() throws Exception {
     // We don't want the cleaner to remove files. The tests do that.
     UTIL.getMiniHBaseCluster().getMaster().getHFileCleaner().cancel(true);

-    POOL = new DirScanPool(UTIL.getConfiguration());
+    POOL = DirScanPool.getHFileCleanerScanPool(UTIL.getConfiguration());
   }

   private static void setupConf(Configuration conf) {

hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@ public static void setupCluster() throws Exception {
     String archivingZNode = ZKTableArchiveClient.getArchiveZNode(UTIL.getConfiguration(), watcher);
     ZKUtil.createWithParents(watcher, archivingZNode);
     rss = mock(RegionServerServices.class);
-    POOL = new DirScanPool(UTIL.getConfiguration());
+    POOL = DirScanPool.getHFileCleanerScanPool(UTIL.getConfiguration());
   }

   private static void setupConf(Configuration conf) {

hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java

Lines changed: 52 additions & 1 deletion
@@ -61,7 +61,7 @@ public class TestCleanerChore {

   @BeforeClass
   public static void setup() {
-    POOL = new DirScanPool(UTIL.getConfiguration());
+    POOL = DirScanPool.getHFileCleanerScanPool(UTIL.getConfiguration());
   }

   @AfterClass
@@ -469,6 +469,57 @@ public void testOnConfigurationChange() throws Exception {
     t.join();
   }

+  @Test
+  public void testOnConfigurationChangeLogCleaner() throws Exception {
+    int availableProcessorNum = Runtime.getRuntime().availableProcessors();
+    if (availableProcessorNum == 1) { // no need to run this test
+      return;
+    }
+
+    DirScanPool pool = DirScanPool.getLogCleanerScanPool(UTIL.getConfiguration());
+
+    // have at least 2 available processors/cores
+    int initPoolSize = availableProcessorNum / 2;
+    int changedPoolSize = availableProcessorNum;
+
+    Stoppable stop = new StoppableImplementation();
+    Configuration conf = UTIL.getConfiguration();
+    Path testDir = UTIL.getDataTestDir();
+    FileSystem fs = UTIL.getTestFileSystem();
+    String confKey = "hbase.test.cleaner.delegates";
+    conf.set(confKey, AlwaysDelete.class.getName());
+    conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, String.valueOf(initPoolSize));
+    final AllValidPaths chore =
+      new AllValidPaths("test-file-cleaner", stop, conf, fs, testDir, confKey, pool);
+    chore.setEnabled(true);
+    // Create subdirs under testDir
+    int dirNums = 6;
+    Path[] subdirs = new Path[dirNums];
+    for (int i = 0; i < dirNums; i++) {
+      subdirs[i] = new Path(testDir, "subdir-" + i);
+      fs.mkdirs(subdirs[i]);
+    }
+    // Under each subdirs create 6 files
+    for (Path subdir : subdirs) {
+      createFiles(fs, subdir, 6);
+    }
+    // Start chore
+    Thread t = new Thread(new Runnable() {
+      @Override
+      public void run() {
+        chore.chore();
+      }
+    });
+    t.setDaemon(true);
+    t.start();
+    // Change size of chore's pool
+    conf.set(CleanerChore.LOG_CLEANER_CHORE_SIZE, String.valueOf(changedPoolSize));
+    pool.onConfigurationChange(conf);
+    assertEquals(changedPoolSize, chore.getChorePoolSize());
+    // Stop chore
+    t.join();
+  }
+
   @Test
   public void testMinimumNumberOfThreads() throws Exception {
     Configuration conf = UTIL.getConfiguration();

hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ public class TestHFileCleaner {
   public static void setupCluster() throws Exception {
     // have to use a minidfs cluster because the localfs doesn't modify file times correctly
     UTIL.startMiniDFSCluster(1);
-    POOL = new DirScanPool(UTIL.getConfiguration());
+    POOL = DirScanPool.getHFileCleanerScanPool(UTIL.getConfiguration());
   }

   @AfterClass

hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ public class TestHFileLinkCleaner {

   @BeforeClass
   public static void setUp() {
-    POOL = new DirScanPool(TEST_UTIL.getConfiguration());
+    POOL = DirScanPool.getHFileCleanerScanPool(TEST_UTIL.getConfiguration());
   }

   @AfterClass
