@@ -31,8 +31,8 @@

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -64,8 +64,8 @@ public LogRollBackupSubprocedurePool(String name, Configuration conf) {
     this.name = name;
     executor =
         new ThreadPoolExecutor(1, threads, keepAlive, TimeUnit.SECONDS,
-            new LinkedBlockingQueue<>(), new DaemonThreadFactory("rs(" + name
-                + ")-backup-pool"));
+            new LinkedBlockingQueue<>(),
+            Threads.newDaemonThreadFactory("rs(" + name + ")-backup"));
     taskPool = new ExecutorCompletionService<>(executor);
   }

@@ -197,6 +197,11 @@ public static ThreadPoolExecutor getBoundedCachedThreadPool(
     return boundedCachedThreadPool;
   }

+  public static ThreadPoolExecutor getBoundedCachedThreadPool(int maxCachedThread, long timeout,
+      TimeUnit unit, String prefix) {
+    return getBoundedCachedThreadPool(maxCachedThread, timeout, unit,
+        newDaemonThreadFactory(prefix));
+  }

   /**
    * Returns a {@link java.util.concurrent.ThreadFactory} that names each created thread uniquely,
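The added overload spares call sites from constructing a thread factory by hand; most of the remaining hunks are mechanical conversions to it or to newDaemonThreadFactory. A minimal sketch of a caller, where the class name, pool size, timeout, and prefix are illustrative rather than taken from the patch:

import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.util.Threads;

public class BoundedPoolExample {
  public static void main(String[] args) throws InterruptedException {
    // At most 4 daemon threads; idle threads exit after 60 seconds.
    // Thread names are derived from the "example" prefix.
    ThreadPoolExecutor pool =
        Threads.getBoundedCachedThreadPool(4, 60L, TimeUnit.SECONDS, "example");
    pool.execute(() -> System.out.println(Thread.currentThread().getName()));
    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);
  }
}
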
@@ -18,9 +18,9 @@

 package org.apache.hadoop.hbase.chaos.policies;

-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.chaos.actions.Action;
 import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.util.StringUtils;

 import java.util.concurrent.ExecutionException;
@@ -42,7 +42,7 @@ public TwoConcurrentActionPolicy(long sleepTime, Action[] actionsOne, Action[] a
     this.actionsOne = actionsOne;
     this.actionsTwo = actionsTwo;
     executor = Executors.newFixedThreadPool(2,
-        new DaemonThreadFactory("TwoConcurrentAction-"));
+        Threads.newDaemonThreadFactory("TwoConcurrentAction"));
   }

   @Override

This file was deleted: evidently the DaemonThreadFactory class itself, since every other file in this diff drops its import in favor of Threads.newDaemonThreadFactory.
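
The deleted class duplicated what Threads.newDaemonThreadFactory already provides: daemon threads named from a caller-supplied prefix. A rough sketch of that shared contract follows; this is an illustration of the behavior, not the verbatim body of either class, and the real factory also appends pool/thread counters to the prefix:

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical stand-in showing the contract both factories satisfy.
final class DaemonFactorySketch implements ThreadFactory {
  private final String prefix;
  private final AtomicInteger threadNumber = new AtomicInteger(1);

  DaemonFactorySketch(String prefix) {
    this.prefix = prefix;
  }

  @Override
  public Thread newThread(Runnable r) {
    Thread t = new Thread(r, prefix + "-t" + threadNumber.getAndIncrement());
    t.setDaemon(true); // daemon threads never block JVM shutdown
    t.setPriority(Thread.NORM_PRIORITY);
    return t;
  }
}

The factory appending its own suffix would also explain why the new call sites drop the trailing "-pool-" fragments from their prefixes, e.g. "rs(...)-backup-pool" becoming "rs(...)-backup".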

@@ -25,7 +25,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -66,7 +66,7 @@ public void start() {
         60,
         TimeUnit.SECONDS,
         new ArrayBlockingQueue<>(maxQueueLength),
-        new DaemonThreadFactory("FifoRpcScheduler.handler"),
+        Threads.newDaemonThreadFactory("FifoRpcScheduler.handler"),
         new ThreadPoolExecutor.CallerRunsPolicy());
   }

@@ -25,7 +25,7 @@
 import java.util.concurrent.atomic.AtomicInteger;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
@@ -72,11 +72,11 @@ public void start() {
       rsRsreportMaxQueueLength);
     this.executor = new ThreadPoolExecutor(handlerCount, handlerCount, 60, TimeUnit.SECONDS,
         new ArrayBlockingQueue<Runnable>(maxQueueLength),
-        new DaemonThreadFactory("MasterFifoRpcScheduler.call.handler"),
+        Threads.newDaemonThreadFactory("MasterFifoRpcScheduler.call.handler"),
         new ThreadPoolExecutor.CallerRunsPolicy());
     this.rsReportExecutor = new ThreadPoolExecutor(rsReportHandlerCount, rsReportHandlerCount, 60,
         TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(rsRsreportMaxQueueLength),
-        new DaemonThreadFactory("MasterFifoRpcScheduler.RSReport.handler"),
+        Threads.newDaemonThreadFactory("MasterFifoRpcScheduler.RSReport.handler"),
         new ThreadPoolExecutor.CallerRunsPolicy());
   }

@@ -17,13 +17,12 @@
  */
 package org.apache.hadoop.hbase.master.cleaner;

-import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.conf.ConfigurationObserver;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,10 +50,7 @@ public DirScanPool(Configuration conf) {
   }

   private static ThreadPoolExecutor initializePool(int size) {
-    ThreadPoolExecutor executor = new ThreadPoolExecutor(size, size, 1, TimeUnit.MINUTES,
-      new LinkedBlockingQueue<>(), new DaemonThreadFactory("dir-scan-pool"));
-    executor.allowCoreThreadTimeOut(true);
-    return executor;
+    return Threads.getBoundedCachedThreadPool(size, 1, TimeUnit.MINUTES, "dir-scan");
   }

   /**
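The four deleted lines in initializePool were an inline copy of what getBoundedCachedThreadPool does, so the one-line delegation should be behavior-preserving. A sketch of what the new call expands to, assuming the factory's body matches its visible tail in the Threads hunk above (the exact thread naming is an assumption):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.util.Threads;

final class DirScanPoolEquivalence {
  // Roughly what Threads.getBoundedCachedThreadPool(size, 1, TimeUnit.MINUTES, "dir-scan")
  // resolves to, per the deleted inline version.
  static ThreadPoolExecutor initializePool(int size) {
    ThreadPoolExecutor executor = new ThreadPoolExecutor(size, size, 1, TimeUnit.MINUTES,
        new LinkedBlockingQueue<>(), Threads.newDaemonThreadFactory("dir-scan"));
    // allowCoreThreadTimeOut lets even core threads exit after the keep-alive,
    // which is what makes this fixed-size pool behave like a bounded cache.
    executor.allowCoreThreadTimeOut(true);
    return executor;
  }
}
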
@@ -29,14 +29,14 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

-import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.errorhandling.ForeignException;
+import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
-import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;

+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;

 /**
@@ -113,7 +113,7 @@ public static ThreadPoolExecutor defaultPool(String coordName, int opThreads,
       long keepAliveMillis) {
     return new ThreadPoolExecutor(1, opThreads, keepAliveMillis, TimeUnit.MILLISECONDS,
         new SynchronousQueue<>(),
-        new DaemonThreadFactory("(" + coordName + ")-proc-coordinator-pool"));
+        Threads.newDaemonThreadFactory("(" + coordName + ")-proc-coordinator"));
   }

   /**
@@ -26,11 +26,11 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

+import org.apache.hadoop.hbase.errorhandling.ForeignException;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
-import org.apache.hadoop.hbase.errorhandling.ForeignException;

 import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;

@@ -87,7 +87,7 @@ public static ThreadPoolExecutor defaultPool(String memberName, int procThreads,
       long keepAliveMillis) {
     return new ThreadPoolExecutor(1, procThreads, keepAliveMillis, TimeUnit.MILLISECONDS,
         new SynchronousQueue<>(),
-        new DaemonThreadFactory("member: '" + memberName + "' subprocedure-pool"));
+        Threads.newDaemonThreadFactory("member: '" + memberName + "' subprocedure"));
   }

   /**
@@ -30,7 +30,6 @@

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
@@ -215,7 +214,7 @@ static class FlushTableSubprocedurePool {
       int threads = conf.getInt(CONCURENT_FLUSH_TASKS_KEY, DEFAULT_CONCURRENT_FLUSH_TASKS);
       this.name = name;
       executor = Threads.getBoundedCachedThreadPool(threads, keepAlive, TimeUnit.MILLISECONDS,
-        new DaemonThreadFactory("rs(" + name + ")-flush-proc-pool-"));
+        "rs(" + name + ")-flush-proc");
       taskPool = new ExecutorCompletionService<>(executor);
     }

@@ -31,7 +31,6 @@

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
@@ -284,7 +283,7 @@ static class SnapshotSubprocedurePool {
       int threads = conf.getInt(CONCURENT_SNAPSHOT_TASKS_KEY, DEFAULT_CONCURRENT_SNAPSHOT_TASKS);
       this.name = name;
       executor = Threads.getBoundedCachedThreadPool(threads, keepAlive, TimeUnit.MILLISECONDS,
-        new DaemonThreadFactory("rs(" + name + ")-snapshot-pool-"));
+        "rs(" + name + ")-snapshot");
       taskPool = new ExecutorCompletionService<>(executor);
     }

@@ -19,9 +19,9 @@
 package org.apache.hadoop.hbase.security.access;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKListener;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -69,7 +69,7 @@ public ZKPermissionWatcher(ZKWatcher watcher,
     String aclZnodeParent = conf.get("zookeeper.znode.acl.parent", ACL_NODE);
     this.aclZNode = ZNodePaths.joinZNode(watcher.getZNodePaths().baseZNode, aclZnodeParent);
     executor = Executors.newSingleThreadExecutor(
-        new DaemonThreadFactory("zk-permission-watcher"));
+        Threads.newDaemonThreadFactory("zk-permission-watcher"));
   }

   public void start() throws KeeperException {
@@ -31,9 +31,9 @@

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -127,7 +127,7 @@ public SimpleSubprocedurePool(String name, Configuration conf) {
     this.name = name;
     executor = new ThreadPoolExecutor(1, 1, 500,
         TimeUnit.SECONDS, new LinkedBlockingQueue<>(),
-        new DaemonThreadFactory("rs(" + name + ")-procedure-pool-"));
+        Threads.newDaemonThreadFactory("rs(" + name + ")-procedure"));
     taskPool = new ExecutorCompletionService<>(executor);
   }