diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
index 9dbb307df402..0d3e5f5192b1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
@@ -20,8 +20,6 @@
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map.Entry;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadFactory;
@@ -151,20 +149,18 @@ public synchronized boolean scheduleChore(ScheduledChore chore) {
     try {
       if (chore.getPeriod() <= 0) {
-        LOG.info("Chore {} is disabled because its period is not positive.", chore);
+        LOG.info("Chore {} is disabled because its period is not positive", chore);
         return false;
       }
-      LOG.info("Chore {} is enabled.", chore);
+      LOG.info("Chore {} is enabled", chore);
       chore.setChoreServicer(this);
       ScheduledFuture<?> future = scheduler.scheduleAtFixedRate(chore, chore.getInitialDelay(),
         chore.getPeriod(), chore.getTimeUnit());
       scheduledChores.put(chore, future);
       return true;
-    } catch (Exception exception) {
-      if (LOG.isInfoEnabled()) {
-        LOG.info("Could not successfully schedule chore: " + chore.getName());
-      }
+    } catch (Exception ex) {
+      LOG.error("Could not successfully schedule chore: {}", chore, ex);
       return false;
     }
   }
@@ -280,7 +276,12 @@ private synchronized boolean requestCorePoolIncrease() {
     // amongst occurrences of the same chore).
     if (scheduler.getCorePoolSize() < scheduledChores.size()) {
       scheduler.setCorePoolSize(scheduler.getCorePoolSize() + 1);
-      printChoreServiceDetails("requestCorePoolIncrease");
+
+      LOG.trace("requestCorePoolIncrease");
+      LOG.trace("ChoreService corePoolSize: {}", getCorePoolSize());
+      LOG.trace("ChoreService scheduledChores: {}", getNumberOfScheduledChores());
+      LOG.trace("ChoreService missingStartTimeCount: {}", getNumberOfChoresMissingStartTime());
+
       return true;
     }
     return false;
@@ -294,7 +295,11 @@ private synchronized boolean requestCorePoolIncrease() {
   private synchronized void requestCorePoolDecrease() {
     if (scheduler.getCorePoolSize() > MIN_CORE_POOL_SIZE) {
       scheduler.setCorePoolSize(scheduler.getCorePoolSize() - 1);
-      printChoreServiceDetails("requestCorePoolDecrease");
+
+      LOG.trace("requestCorePoolDecrease");
+      LOG.trace("ChoreService corePoolSize: {}", getCorePoolSize());
+      LOG.trace("ChoreService scheduledChores: {}", getNumberOfScheduledChores());
+      LOG.trace("ChoreService missingStartTimeCount: {}", getNumberOfChoresMissingStartTime());
     }
   }
@@ -315,7 +320,12 @@ public synchronized void onChoreMissedStartTime(ScheduledChore chore) {
     // more on each iteration. This hurts us because the ScheduledThreadPoolExecutor allocates
     // idle threads to chores based on how delayed they are.
     rescheduleChore(chore);
-    printChoreDetails("onChoreMissedStartTime", chore);
+
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("onChoreMissedStartTime");
+      LOG.trace("Chore: {}", chore);
+      LOG.trace("Chore timeBetweenRuns: {}", chore.getTimeBetweenRuns());
+    }
   }
   /**
    *
@@ -325,10 +335,8 @@ public synchronized void onChoreMissedStartTime(ScheduledChore chore) {
    */
   public synchronized void shutdown() {
     scheduler.shutdownNow();
-    if (LOG.isInfoEnabled()) {
-      LOG.info("Chore service for: " + coreThreadPoolPrefix + " had " + scheduledChores.keySet()
-        + " on shutdown");
-    }
+    LOG.info("Chore service for: {} had {} on shutdown", coreThreadPoolPrefix,
+      scheduledChores.keySet());
     cancelAllChores(true);
     scheduledChores.clear();
     choresMissingStartTime.clear();
@@ -359,34 +367,4 @@ private void cancelAllChores(final boolean mayInterruptIfRunning) {
     }
   }

-  /**
-   * Prints a summary of important details about the chore. Used for debugging purposes
-   */
-  private void printChoreDetails(final String header, ScheduledChore chore) {
-    LinkedHashMap<String, String> output = new LinkedHashMap<>();
-    output.put(header, "");
-    output.put("Chore name: ", chore.getName());
-    output.put("Chore period: ", Integer.toString(chore.getPeriod()));
-    output.put("Chore timeBetweenRuns: ", Long.toString(chore.getTimeBetweenRuns()));
-
-    for (Entry<String, String> entry : output.entrySet()) {
-      if (LOG.isTraceEnabled()) LOG.trace(entry.getKey() + entry.getValue());
-    }
-  }
-
-  /**
-   * Prints a summary of important details about the service. Used for debugging purposes
-   */
-  private void printChoreServiceDetails(final String header) {
-    LinkedHashMap<String, String> output = new LinkedHashMap<>();
-    output.put(header, "");
-    output.put("ChoreService corePoolSize: ", Integer.toString(getCorePoolSize()));
-    output.put("ChoreService scheduledChores: ", Integer.toString(getNumberOfScheduledChores()));
-    output.put("ChoreService missingStartTimeCount: ",
-      Integer.toString(getNumberOfChoresMissingStartTime()));
-
-    for (Entry<String, String> entry : output.entrySet()) {
-      if (LOG.isTraceEnabled()) LOG.trace(entry.getKey() + entry.getValue());
-    }
-  }
 }
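Note: the ChoreService changes above establish the pattern used throughout this patch. SLF4J renders {} placeholders only when the target level is enabled, so both the string concatenation and the isInfoEnabled()/isTraceEnabled() guards around cheap messages can be dropped; a guard is kept only where logging itself does extra work, as in the retained isTraceEnabled() block in onChoreMissedStartTime. A minimal before/after sketch (the class and method names are illustrative, not part of the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

      void schedule(Object chore) {
        // Before: the message is concatenated even when INFO is disabled,
        // so a guard was needed to avoid the wasted work.
        if (LOG.isInfoEnabled()) {
          LOG.info("Chore " + chore + " is enabled.");
        }

        // After: the placeholder is substituted only if INFO is enabled,
        // so no guard is needed for cheap arguments.
        LOG.info("Chore {} is enabled", chore);
      }
    }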
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
index 0f39e8b4ac1c..8ac999c66ee5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
@@ -165,7 +165,7 @@ public static boolean isShowConfInServlet() {
       }
     } catch (LinkageError e) {
       // should we handle it more aggressively in addition to log the error?
-      LOG.warn("Error thrown: ", e);
+      LOG.warn("Failed to load ConfServlet", e);
     } catch (ClassNotFoundException ce) {
       LOG.debug("ClassNotFound: ConfServlet");
       // ignore
@@ -191,29 +191,22 @@ public static String getPassword(Configuration conf, String alias,
       Method m = Configuration.class.getMethod("getPassword", String.class);
       char[] p = (char[]) m.invoke(conf, alias);
       if (p != null) {
-        LOG.debug(String.format("Config option \"%s\" was found through" +
-          " the Configuration getPassword method.", alias));
+        LOG.debug(
+          "Config option \"{}\" was found through the Configuration getPassword method",
+          alias);
         passwd = new String(p);
       } else {
-        LOG.debug(String.format(
-          "Config option \"%s\" was not found. Using provided default value",
-          alias));
+        LOG.debug("Config option \"{}\" was not found. Using provided default value", alias);
         passwd = defPass;
       }
     } catch (NoSuchMethodException e) {
       // this is a version of Hadoop where the credential
       //provider API doesn't exist yet
-      LOG.debug(String.format(
-        "Credential.getPassword method is not available." +
-        " Falling back to configuration."));
+      LOG.debug("Credential.getPassword method is not available. Falling back to configuration",
+        e);
       passwd = conf.get(alias, defPass);
-    } catch (SecurityException e) {
-      throw new IOException(e.getMessage(), e);
-    } catch (IllegalAccessException e) {
-      throw new IOException(e.getMessage(), e);
-    } catch (IllegalArgumentException e) {
-      throw new IOException(e.getMessage(), e);
-    } catch (InvocationTargetException e) {
+    } catch (SecurityException | IllegalAccessException | IllegalArgumentException
+        | InvocationTargetException e) {
       throw new IOException(e.getMessage(), e);
     }
     return passwd;
keyLength=" + keyLen + bytesToHex(buf, offset, length)); } // check the value if (pos + Bytes.SIZEOF_INT > endOffset) { - String msg = - "Overflow when reading value length at position=" + pos + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Overflow when reading value length at position=" + pos + + bytesToHex(buf, offset, length)); } int valLen = Bytes.toInt(buf, pos, Bytes.SIZEOF_INT); pos += Bytes.SIZEOF_INT; if (valLen < 0 || pos + valLen > endOffset) { - String msg = "Invalid value length in KeyValue, valueLength=" + valLen + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Invalid value length in KeyValue, valueLength=" + valLen + + bytesToHex(buf, offset, length)); } // check the row if (pos + Bytes.SIZEOF_SHORT > endOffset) { - String msg = - "Overflow when reading row length at position=" + pos + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Overflow when reading row length at position=" + pos + bytesToHex(buf, offset, length)); } short rowLen = Bytes.toShort(buf, pos, Bytes.SIZEOF_SHORT); pos += Bytes.SIZEOF_SHORT; if (rowLen < 0 || pos + rowLen > endOffset) { - String msg = - "Invalid row length in KeyValue, rowLength=" + rowLen + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Invalid row length in KeyValue, rowLength=" + rowLen + bytesToHex(buf, offset, length)); } pos += rowLen; // check the family if (pos + Bytes.SIZEOF_BYTE > endOffset) { - String msg = "Overflow when reading family length at position=" + pos + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Overflow when reading family length at position=" + pos + + bytesToHex(buf, offset, length)); } int familyLen = buf[pos]; pos += Bytes.SIZEOF_BYTE; if (familyLen < 0 || pos + familyLen > endOffset) { - String msg = "Invalid family length in KeyValue, familyLength=" + familyLen + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Invalid family length in KeyValue, familyLength=" + + familyLen + bytesToHex(buf, offset, length)); } pos += familyLen; // check the qualifier int qualifierLen = keyLen - Bytes.SIZEOF_SHORT - rowLen - Bytes.SIZEOF_BYTE - familyLen - Bytes.SIZEOF_LONG - Bytes.SIZEOF_BYTE; if (qualifierLen < 0 || pos + qualifierLen > endOffset) { - String msg = "Invalid qualifier length in KeyValue, qualifierLen=" + qualifierLen + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Invalid qualifier length in KeyValue, qualifierLen=" + qualifierLen + + bytesToHex(buf, offset, length)); } pos += qualifierLen; // check the timestamp if (pos + Bytes.SIZEOF_LONG > endOffset) { - String msg = - "Overflow when reading timestamp at position=" + pos + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Overflow when reading timestamp at position=" + pos + bytesToHex(buf, offset, length)); } long timestamp = Bytes.toLong(buf, pos, Bytes.SIZEOF_LONG); if (timestamp < 0) { - String msg = - "Timestamp cannot be negative, ts=" + timestamp + bytesToHex(buf, offset, 
length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Timestamp cannot be negative, ts=" + timestamp + bytesToHex(buf, offset, length)); } pos += Bytes.SIZEOF_LONG; // check the type if (pos + Bytes.SIZEOF_BYTE > endOffset) { - String msg = - "Overflow when reading type at position=" + pos + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Overflow when reading type at position=" + pos + bytesToHex(buf, offset, length)); } byte type = buf[pos]; if (!Type.isValidType(type)) { - String msg = "Invalid type in KeyValue, type=" + type + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Invalid type in KeyValue, type=" + type + bytesToHex(buf, offset, length)); } pos += Bytes.SIZEOF_BYTE; // check the value if (pos + valLen > endOffset) { - String msg = - "Overflow when reading value part at position=" + pos + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Overflow when reading value part at position=" + pos + bytesToHex(buf, offset, length)); } pos += valLen; // check the tags @@ -643,46 +609,36 @@ static void checkKeyValueBytes(byte[] buf, int offset, int length, boolean withT pos = checkKeyValueTagBytes(buf, offset, length, pos, endOffset); } if (pos != endOffset) { - String msg = "Some redundant bytes in KeyValue's buffer, startOffset=" + pos + ", endOffset=" - + endOffset + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Some redundant bytes in KeyValue's buffer, startOffset=" + + pos + ", endOffset=" + endOffset + bytesToHex(buf, offset, length)); } } private static int checkKeyValueTagBytes(byte[] buf, int offset, int length, int pos, int endOffset) { if (pos + Bytes.SIZEOF_SHORT > endOffset) { - String msg = "Overflow when reading tags length at position=" + pos + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException( + "Overflow when reading tags length at position=" + pos + bytesToHex(buf, offset, length)); } short tagsLen = Bytes.toShort(buf, pos); pos += Bytes.SIZEOF_SHORT; if (tagsLen < 0 || pos + tagsLen > endOffset) { - String msg = "Invalid tags length in KeyValue at position=" + (pos - Bytes.SIZEOF_SHORT) - + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Invalid tags length in KeyValue at position=" + + (pos - Bytes.SIZEOF_SHORT) + bytesToHex(buf, offset, length)); } int tagsEndOffset = pos + tagsLen; for (; pos < tagsEndOffset;) { if (pos + Tag.TAG_LENGTH_SIZE > endOffset) { - String msg = "Overflow when reading tag length at position=" + pos + - bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + throw new IllegalArgumentException("Overflow when reading tag length at position=" + pos + + bytesToHex(buf, offset, length)); } short tagLen = Bytes.toShort(buf, pos); pos += Tag.TAG_LENGTH_SIZE; // tagLen contains one byte tag type, so must be not less than 1. 
if (tagLen < 1 || pos + tagLen > endOffset) { - String msg = + throw new IllegalArgumentException( "Invalid tag length at position=" + (pos - Tag.TAG_LENGTH_SIZE) + ", tagLength=" - + tagLen + bytesToHex(buf, offset, length); - LOG.warn(msg); - throw new IllegalArgumentException(msg); + + tagLen + bytesToHex(buf, offset, length)); } pos += tagLen; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java index e1a96bdcf484..546812495065 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java @@ -81,9 +81,7 @@ private void rethrowEofException(IOException ioEx) throws IOException { LOG.trace("Error getting available for error message - ignoring", t); } if (!isEof) throw ioEx; - if (LOG.isTraceEnabled()) { - LOG.trace("Partial cell read caused by EOF", ioEx); - } + LOG.trace("Partial cell read caused by EOF", ioEx); EOFException eofEx = new EOFException("Partial cell read"); eofEx.initCause(ioEx); throw eofEx; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java index 3004973fd4ae..baa78828ade5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java @@ -354,7 +354,7 @@ public Compressor getCompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); - if (LOG.isTraceEnabled()) LOG.trace("Retrieved compressor " + compressor + " from pool."); + LOG.trace("Retrieved compressor {} from pool", compressor); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using it. @@ -369,7 +369,7 @@ public Compressor getCompressor() { public void returnCompressor(Compressor compressor) { if (compressor != null) { - if (LOG.isTraceEnabled()) LOG.trace("Returning compressor " + compressor + " to pool."); + LOG.trace("Returning compressor {} to pool", compressor); CodecPool.returnCompressor(compressor); } } @@ -378,7 +378,7 @@ public Decompressor getDecompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Decompressor decompressor = CodecPool.getDecompressor(codec); - if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool."); + LOG.trace("Retrieved decompressor {} from pool", decompressor); if (decompressor != null) { if (decompressor.finished()) { // Somebody returns the decompressor to CodecPool but is still using it. 
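Note: in checkKeyValueBytes above, every LOG.warn(msg) immediately followed by throw new IllegalArgumentException(msg) is reduced to the throw alone. The exception already carries the message, and logging at the throw site means the same problem gets reported twice once a caller logs or rethrows it. A condensed before/after sketch (hypothetical helper, not patch code):

    public class LogAndThrowSketch {
      // Before: the message is emitted here *and* wherever the caller
      // handles the exception, producing duplicate log lines.
      static void checkBefore(byte[] buf, org.slf4j.Logger log) {
        if (buf == null) {
          String msg = "Invalid to have null byte array in KeyValue.";
          log.warn(msg);
          throw new IllegalArgumentException(msg);
        }
      }

      // After: throw once; the catcher decides whether and where to log.
      static void checkAfter(byte[] buf) {
        if (buf == null) {
          throw new IllegalArgumentException("Invalid to have null byte array in KeyValue.");
        }
      }
    }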
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 3004973fd4ae..baa78828ade5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -354,7 +354,7 @@ public Compressor getCompressor() {
     CompressionCodec codec = getCodec(conf);
     if (codec != null) {
       Compressor compressor = CodecPool.getCompressor(codec);
-      if (LOG.isTraceEnabled()) LOG.trace("Retrieved compressor " + compressor + " from pool.");
+      LOG.trace("Retrieved compressor {} from pool", compressor);
       if (compressor != null) {
         if (compressor.finished()) {
           // Somebody returns the compressor to CodecPool but is still using it.
@@ -369,7 +369,7 @@ public Compressor getCompressor() {

   public void returnCompressor(Compressor compressor) {
     if (compressor != null) {
-      if (LOG.isTraceEnabled()) LOG.trace("Returning compressor " + compressor + " to pool.");
+      LOG.trace("Returning compressor {} to pool", compressor);
       CodecPool.returnCompressor(compressor);
     }
   }
@@ -378,7 +378,7 @@ public Decompressor getDecompressor() {
     CompressionCodec codec = getCodec(conf);
     if (codec != null) {
       Decompressor decompressor = CodecPool.getDecompressor(codec);
-      if (LOG.isTraceEnabled()) LOG.trace("Retrieved decompressor " + decompressor + " from pool.");
+      LOG.trace("Retrieved decompressor {} from pool", decompressor);
       if (decompressor != null) {
         if (decompressor.finished()) {
           // Somebody returns the decompressor to CodecPool but is still using it.
@@ -394,10 +394,10 @@ public Decompressor getDecompressor() {

   public void returnDecompressor(Decompressor decompressor) {
     if (decompressor != null) {
-      if (LOG.isTraceEnabled()) LOG.trace("Returning decompressor " + decompressor + " to pool.");
+      LOG.trace("Returning decompressor {} to pool", decompressor);
       CodecPool.returnDecompressor(decompressor);
       if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) {
-        if (LOG.isTraceEnabled()) LOG.trace("Ending decompressor " + decompressor);
+        LOG.trace("Ending decompressor {}", decompressor);
         decompressor.end();
       }
     }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index af0089d02cda..608a39b97e15 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -547,9 +547,7 @@ public static KeyProvider getKeyProvider(Configuration conf) {
           getClassLoaderForClass(KeyProvider.class).loadClass(providerClassName),
           conf);
       provider.init(providerParameters);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Installed " + providerClassName + " into key provider cache");
-      }
+      LOG.debug("Installed {} into key provider cache", providerClassName);
       keyProviderCache.put(providerCacheKey, provider);
       return provider;
     } catch (Exception e) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index b967db7f27dc..0eafb1dbaa88 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -113,7 +113,7 @@ public synchronized void closeReceivers() {
       try {
         rcvr.close();
       } catch (IOException e) {
-        LOG.warn("Unable to close SpanReceiver correctly: " + e.getMessage(), e);
+        LOG.warn("Unable to close SpanReceiver correctly", e);
       }
     }
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index 42d1bf4fa006..40bf1a94b6c9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -99,7 +99,6 @@ private void createBuffers(int threadCount, ByteBufferAllocator alloc) throws IO
       }
       assert bufferIndex == bufferCount;
     } catch (Exception e) {
-      LOG.error("Buffer creation interrupted", e);
       throw new IOException(e);
     }
   } finally {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 6f88c005cb86..1f9094225ffb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -196,7 +196,7 @@ int headerSize() {
         return (int) UnsafeAccess.theUnsafe.objectFieldOffset(
           HeaderSize.class.getDeclaredField("a"));
       } catch (NoSuchFieldException | SecurityException e) {
-        LOG.error(e.toString(), e);
+        LOG.error("Unable to determine header size", e);
       }
       return super.headerSize();
     }
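Note: the SpanReceiverHost hunk above drops the ": " + e.getMessage() suffix because SLF4J already prints the throwable's message and stack trace when the exception is passed as the final argument; splicing getMessage() into the format string prints the message twice. A sketch (class name illustrative):

    import java.io.Closeable;
    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableArgumentSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableArgumentSketch.class);

      void closeQuietly(Closeable receiver) {
        try {
          receiver.close();
        } catch (IOException e) {
          // 'e' supplies both the message and the stack trace; there is no
          // need to append e.getMessage() to the format string.
          LOG.warn("Unable to close SpanReceiver correctly", e);
        }
      }
    }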
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index ea0cb2bc3cb8..54a281f25229 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -483,33 +483,25 @@ public static void setStoragePolicy(final FileSystem fs, final Path path,
   static void setStoragePolicy(final FileSystem fs, final Path path, final String storagePolicy,
       boolean throwException) throws IOException {
     if (storagePolicy == null) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("We were passed a null storagePolicy, exiting early.");
-      }
+      LOG.trace("Passed a null storagePolicy, exiting early");
       return;
     }
     String trimmedStoragePolicy = storagePolicy.trim();
     if (trimmedStoragePolicy.isEmpty()) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("We were passed an empty storagePolicy, exiting early.");
-      }
+      LOG.trace("Passed an empty storagePolicy, exiting early");
       return;
     } else {
       trimmedStoragePolicy = trimmedStoragePolicy.toUpperCase(Locale.ROOT);
     }
     if (trimmedStoragePolicy.equals(HConstants.DEFER_TO_HDFS_STORAGE_POLICY)) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("We were passed the defer-to-hdfs policy {}, exiting early.",
-          trimmedStoragePolicy);
-      }
+      LOG.trace("Passed the defer-to-hdfs policy {}, exiting early",
+        trimmedStoragePolicy);
       return;
     }
     try {
       invokeSetStoragePolicy(fs, path, trimmedStoragePolicy);
     } catch (IOException e) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("Failed to invoke set storage policy API on FS", e);
-      }
+      LOG.trace("Failed to invoke set storage policy API on FS", e);
       if (throwException) {
         throw e;
       }
@@ -526,9 +518,7 @@ private static void invokeSetStoragePolicy(final FileSystem fs, final Path path,

     try {
       fs.setStoragePolicy(path, storagePolicy);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Set storagePolicy={} for path={}", storagePolicy, path);
-      }
+      LOG.debug("Set storagePolicy={} for path={}", storagePolicy, path);
     } catch (Exception e) {
       toThrow = e;
       // This swallows FNFE, should we be throwing it? seems more likely to indicate dev
@@ -554,16 +544,14 @@ private static void invokeSetStoragePolicy(final FileSystem fs, final Path path,
       // Hadoop 2.8+, 3.0-a1+ added FileSystem.setStoragePolicy with a default implementation
       // that throws UnsupportedOperationException
       } else if (e instanceof UnsupportedOperationException) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("The underlying FileSystem implementation doesn't support " +
-            "setStoragePolicy. This is probably intentional on their part, since HDFS-9345 " +
-            "appears to be present in your version of Hadoop. For more information check " +
-            "the Hadoop documentation on 'ArchivalStorage', the Hadoop FileSystem " +
-            "specification docs from HADOOP-11981, and/or related documentation from the " +
-            "provider of the underlying FileSystem (its name should appear in the " +
-            "stacktrace that accompanies this message). Note in particular that Hadoop's " +
-            "local filesystem implementation doesn't support storage policies.", e);
-        }
+        LOG.debug("The underlying FileSystem implementation doesn't support " +
+          "setStoragePolicy. This is probably intentional on their part, since HDFS-9345 " +
+          "appears to be present in your version of Hadoop. For more information check " +
+          "the Hadoop documentation on 'ArchivalStorage', the Hadoop FileSystem " +
+          "specification docs from HADOOP-11981, and/or related documentation from the " +
+          "provider of the underlying FileSystem (its name should appear in the " +
+          "stacktrace that accompanies this message). Note in particular that Hadoop's " +
+          "local filesystem implementation doesn't support storage policies.", e);
       }
     }
@@ -622,9 +610,7 @@ public static FileStatus[] listStatus(final FileSystem fs,
       status = filter == null ? fs.listStatus(dir) : fs.listStatus(dir, filter);
     } catch (FileNotFoundException fnfe) {
       // if directory doesn't exist, return null
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("{} doesn't exist", dir);
-      }
+      LOG.trace("Directory {} does not exist", dir, fnfe);
     }
     if (status == null || status.length < 1) {
       return null;
@@ -665,9 +651,7 @@ public static List<LocatedFileStatus> listLocatedStatus(final FileSystem fs,
       }
     } catch (FileNotFoundException fnfe) {
       // if directory doesn't exist, return null
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("{} doesn't exist", dir);
-      }
+      LOG.trace("Directory {} does not exist", dir, fnfe);
     }
     return status;
   }
@@ -702,13 +686,13 @@ public static boolean isExists(final FileSystem fs, final Path path) throws IOEx
    * Log the current state of the filesystem from a certain root directory
    * @param fs filesystem to investigate
    * @param root root file/directory to start logging from
-   * @param log log to output information
+   * @param logger log to output information
    * @throws IOException if an unexpected exception occurs
    */
-  public static void logFileSystemState(final FileSystem fs, final Path root, Logger log)
+  public static void logFileSystemState(final FileSystem fs, final Path root, final Logger logger)
       throws IOException {
-    log.debug("File system contents for path {}", root);
-    logFSTree(log, fs, root, "|-");
+    LOG.debug("File system contents for path {}", root);
+    logFSTree(logger, fs, root, "|-");
   }

   /**
    *
   * @see #logFileSystemState(FileSystem, Path, Logger)
    */
@@ -716,19 +700,21 @@ public static void logFileSystemState(final FileSystem fs, final Path root, Logg
-  private static void logFSTree(Logger log, final FileSystem fs, final Path root, String prefix)
+  private static void logFSTree(final Logger logger, final FileSystem fs, final Path root, String prefix)
       throws IOException {
-    FileStatus[] files = listStatus(fs, root, null);
-    if (files == null) {
-      return;
-    }
+    if (logger.isDebugEnabled()) {
+      FileStatus[] files = listStatus(fs, root, null);
+      if (files == null) {
+        return;
+      }

-    for (FileStatus file : files) {
-      if (file.isDirectory()) {
-        log.debug(prefix + file.getPath().getName() + "/");
-        logFSTree(log, fs, file.getPath(), prefix + "---");
-      } else {
-        log.debug(prefix + file.getPath().getName());
+      for (FileStatus file : files) {
+        if (file.isDirectory()) {
+          logger.debug(prefix + file.getPath().getName() + "/");
+          logFSTree(logger, fs, file.getPath(), prefix + "---");
+        } else {
+          logger.debug(prefix + file.getPath().getName());
+        }
       }
     }
   }
@@ -801,8 +787,8 @@ private static class DfsBuilderUtility {
         allMethodsPresent = true;
         LOG.debug("Using builder API via reflection for DFS file creation.");
       } catch (NoSuchMethodException e) {
-        LOG.debug("Could not find method on builder; will use old DFS API for file creation {}",
-          e.getMessage());
+        LOG.debug("Could not find method on builder; will use old DFS API for file creation",
+          e);
       }
     }
   }
@@ -829,7 +815,7 @@ static FSDataOutputStream createHelper(FileSystem fs, Path path, boolean overwri
         return (FSDataOutputStream) buildMethod.invoke(builder);
       } catch (IllegalAccessException | InvocationTargetException e) {
         // Should have caught this failure during initialization, so log full trace here
-        LOG.warn("Couldn't use reflection with builder API", e);
+        LOG.warn("Could not use reflection with builder API", e);
       }
     }
@@ -855,7 +841,7 @@ static FSDataOutputStream createHelper(FileSystem fs, Path path, boolean overwri
         return (FSDataOutputStream) buildMethod.invoke(builder);
       } catch (IllegalAccessException | InvocationTargetException e) {
         // Should have caught this failure during initialization, so log full trace here
-        LOG.warn("Couldn't use reflection with builder API", e);
+        LOG.warn("Failed to use reflection with builder API", e);
       }
     }
@@ -908,7 +894,7 @@ private static class StreamCapabilities {
           "top of an alternate FileSystem implementation you should manually verify that " +
           "hflush and hsync are implemented; otherwise you risk data loss and hard to " +
           "diagnose errors when our assumptions are violated.");
-        LOG.debug("The first request to check for StreamCapabilities came from this stacktrace.",
+        LOG.debug("The first request to check for StreamCapabilities came from this stacktrace",
           exception);
       } finally {
         PRESENT = tmp;
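Note: the logFSTree rewrite above shows the one case where a level check still earns its keep. The expensive part is computing the log content (a recursive listing that issues filesystem calls), not formatting the message, so the whole traversal is wrapped in isDebugEnabled(). A sketch of the rule of thumb (names illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class GuardedWorkSketch {
      private static final Logger LOG = LoggerFactory.getLogger(GuardedWorkSketch.class);

      interface Inventory {
        String expensiveScan(); // imagine this walks a filesystem tree
      }

      void report(Inventory inventory) {
        // Placeholders make guards unnecessary for cheap arguments, but any
        // work done to *produce* an argument still runs unconditionally,
        // so keep a guard when that work is expensive.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Full inventory: {}", inventory.expensiveScan());
        }
      }
    }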
loader", name); throw e2; } } @@ -343,9 +332,7 @@ public URL getResource(String name) { // Delegate to the parent first if necessary if (loadResourceUsingParentFirst(name)) { - if (LOG.isDebugEnabled()) { - LOG.debug("Checking parent first for resource " + name); - } + LOG.debug("Checking parent first for resource: {}", name); resource = super.getResource(name); parentLoaded = true; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java index 9c242ab1f764..e2c72227fb8b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -162,21 +162,15 @@ private Class tryRefreshClass(String name) throws ClassNotFoundException { Class clasz = findLoadedClass(name); if (clasz != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("Class {} already loaded", name); - } + LOG.debug("Class {} already loaded", name); } else { try { - if (LOG.isDebugEnabled()) { - LOG.debug("Finding class: {}", name); - } + LOG.debug("Finding class: {}", name); clasz = findClass(name); } catch (ClassNotFoundException cnfe) { // Load new jar files if any - if (LOG.isDebugEnabled()) { - LOG.debug("Loading new jar files, if any"); - } + LOG.debug("Loading new jar files, if any"); loadNewJars(); @@ -235,9 +229,7 @@ private synchronized void loadNewJars() { Path path = status.getPath(); String fileName = path.getName(); if (!fileName.endsWith(".jar")) { - if (LOG.isDebugEnabled()) { - LOG.debug("Ignored non-jar file {}", fileName); - } + LOG.debug("Ignored non-jar file {}", fileName); continue; // Ignore non-jar files } Long cachedLastModificationTime = jarModifiedTime.get(fileName); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java index c38f1a9f8b95..67efb7df8eab 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java @@ -24,8 +24,6 @@ import org.apache.commons.codec.binary.Hex; import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Utility class for MD5 @@ -33,7 +31,6 @@ */ @InterfaceAudience.Public public class MD5Hash { - private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class); /** * Given a byte array, returns in MD5 hash as a hex string. 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
index 32a007b19121..6ef104f2b044 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
@@ -23,14 +23,10 @@
 import java.lang.reflect.Method;
 import java.lang.reflect.UndeclaredThrowableException;

-import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 @InterfaceAudience.Private
-public final class Methods {
-  private static final Logger LOG = LoggerFactory.getLogger(Methods.class);
+public class Methods {

   private Methods() {
   }
@@ -41,12 +37,11 @@ public static <T> Object call(Class<T> clazz, T instance, String methodName,
       Method m = clazz.getMethod(methodName, types);
       return m.invoke(instance, args);
     } catch (IllegalArgumentException arge) {
-      LOG.error(HBaseMarkers.FATAL, "Constructed invalid call. class="+clazz.getName()+
-        " method=" + methodName + " types=" + Classes.stringify(types), arge);
-      throw arge;
+      throw new IllegalArgumentException("Constructed invalid call. class=" + clazz.getName() +
+        " method=" + methodName + " types=" + Classes.stringify(types), arge);
     } catch (NoSuchMethodException nsme) {
       throw new IllegalArgumentException(
-        "Can't find method "+methodName+" in "+clazz.getName()+"!", nsme);
+        "Failed to find method " + methodName + " in " + clazz.getName(), nsme);
     } catch (InvocationTargetException ite) {
       // unwrap the underlying exception and rethrow
       if (ite.getTargetException() != null) {
@@ -62,10 +57,8 @@ public static <T> Object call(Class<T> clazz, T instance, String methodName,
       throw new IllegalArgumentException(
         "Denied access calling "+clazz.getName()+"."+methodName+"()", iae);
     } catch (SecurityException se) {
-      LOG.error(HBaseMarkers.FATAL, "SecurityException calling method. class="+
-        clazz.getName()+" method=" + methodName + " types=" +
-        Classes.stringify(types), se);
-      throw se;
+      throw new SecurityException("SecurityException calling method. class=" + clazz.getName() +
+        " method=" + methodName + " types=" + Classes.stringify(types), se);
     }
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index c72231ac08e6..6838f099016f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -32,7 +32,6 @@
 import java.util.concurrent.atomic.AtomicInteger;

 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,8 +50,7 @@ public class Threads {
     new UncaughtExceptionHandler() {
       @Override
       public void uncaughtException(Thread t, Throwable e) {
-        LOG.warn("Thread:" + t + " exited with Exception:"
-          + StringUtils.stringifyException(e));
+        LOG.warn("Thread: {} exited with Exception", t, e);
       }
     };
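Note: the final Threads hunk relies on an SLF4J detail worth calling out: with one {} and two arguments, t fills the placeholder, and e, left over as the last argument and being a Throwable, is logged as the exception with its full stack trace. That is what lets the patch drop StringUtils.stringifyException; the same mechanism backs the earlier LOG.error("Could not successfully schedule chore: {}", chore, ex) in ChoreService. A sketch (class name illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TrailingThrowableSketch {
      private static final Logger LOG = LoggerFactory.getLogger(TrailingThrowableSketch.class);

      void onUncaught(Thread t, Throwable e) {
        // One placeholder, two arguments: 't' is formatted into the message,
        // and the trailing Throwable 'e' is rendered as message plus stack trace.
        LOG.warn("Thread: {} exited with Exception", t, e);
      }
    }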