Skip to content

Commit f5b4cb0

Browse files
authored
Merge branch 'apache:trunk' into YARN-11446
2 parents b07e0e6 + 759ddeb commit f5b4cb0

File tree

27 files changed

+303
-558
lines changed

27 files changed

+303
-558
lines changed

LICENSE-binary

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -363,7 +363,7 @@ org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
363363
org.lz4:lz4-java:1.7.1
364364
org.objenesis:objenesis:2.6
365365
org.xerial.snappy:snappy-java:1.0.5
366-	org.yaml:snakeyaml:1.33
366+	org.yaml:snakeyaml:2.0
367367
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
368368

369369

hadoop-common-project/hadoop-common/src/main/bin/hadoop

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,9 @@ MYNAME="${BASH_SOURCE-$0}"
2626
function hadoop_usage
2727
{
2828
hadoop_add_option "buildpaths" "attempt to add class files from build tree"
29-	hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in slave mode"
29+	hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in worker mode"
3030
hadoop_add_option "loglevel level" "set the log4j level for this command"
31-	hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
31+	hadoop_add_option "hosts filename" "list of hosts to use in worker mode"
3232
hadoop_add_option "workers" "turn on worker mode"
3333

3434
hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"

hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
# limitations under the License.
1717

1818

19-	# Run a Hadoop command on all slave hosts.
19+	# Run a Hadoop command on all worker hosts.
2020

2121
function hadoop_usage
2222
{

hadoop-common-project/hadoop-common/src/main/conf/log4j.properties

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -75,14 +75,6 @@ log4j.appender.console.target=System.err
7575
log4j.appender.console.layout=org.apache.log4j.PatternLayout
7676
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
7777

78-	#
79-	# TaskLog Appender
80-	#
81-	log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
82-	
83-	log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
84-	log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
85-	
8678
#
8779
# HDFS block state change log from block manager
8880
#

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@
3434

3535
import org.apache.hadoop.classification.VisibleForTesting;
3636
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
37+
import org.slf4j.LoggerFactory;
38+
3739
import org.apache.hadoop.HadoopIllegalArgumentException;
3840
import org.apache.hadoop.classification.InterfaceAudience;
3941
import org.apache.hadoop.classification.InterfaceStability;
@@ -44,6 +46,7 @@
4446
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
4547
import org.apache.hadoop.security.ssl.SSLFactory;
4648
import org.apache.hadoop.util.GenericOptionsParser;
49+
import org.apache.hadoop.util.GenericsUtil;
4750
import org.apache.hadoop.util.ServletUtil;
4851
import org.apache.hadoop.util.Tool;
4952
import org.apache.hadoop.util.ToolRunner;
@@ -338,14 +341,18 @@ public void doGet(HttpServletRequest request, HttpServletResponse response
338341
out.println(MARKER
339342
+ "Submitted Class Name: <b>" + logName + "</b><br />");
340343

341-	Logger log = Logger.getLogger(logName);
344+	org.slf4j.Logger log = LoggerFactory.getLogger(logName);
342345
out.println(MARKER
343346
+ "Log Class: <b>" + log.getClass().getName() +"</b><br />");
344347
if (level != null) {
345348
out.println(MARKER + "Submitted Level: <b>" + level + "</b><br />");
346349
}
347350

348-	process(log, level, out);
351+	if (GenericsUtil.isLog4jLogger(logName)) {
352+	  process(Logger.getLogger(logName), level, out);
353+	} else {
354+	  out.println("Sorry, setting log level is only supported for log4j loggers.<br />");
355+	}
349356
}
350357

351358
out.println(FORMS);

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020

2121
import java.lang.reflect.Array;
2222
import java.util.List;
23+
import java.util.concurrent.atomic.AtomicBoolean;
2324

2425
import org.apache.hadoop.classification.InterfaceAudience;
2526
import org.apache.hadoop.classification.InterfaceStability;
@@ -33,6 +34,14 @@
3334
@InterfaceStability.Unstable
3435
public class GenericsUtil {
3536

37+
private static final String SLF4J_LOG4J_ADAPTER_CLASS = "org.slf4j.impl.Log4jLoggerAdapter";
38+
39+
/**
40+
* Set to false only if log4j adapter class is not found in the classpath. Once set to false,
41+
* the utility method should not bother re-loading class again.
42+
*/
43+
private static final AtomicBoolean IS_LOG4J_LOGGER = new AtomicBoolean(true);
44+
3645
/**
3746
* Returns the Class object (of type <code>Class&lt;T&gt;</code>) of the
3847
* argument of type <code>T</code>.
@@ -87,12 +96,27 @@ public static boolean isLog4jLogger(Class<?> clazz) {
8796
if (clazz == null) {
8897
return false;
8998
}
90-	Logger log = LoggerFactory.getLogger(clazz);
99+	return isLog4jLogger(clazz.getName());
100+
}
101+
102+
/**
103+
* Determine whether the log of the given logger is of Log4J implementation.
104+
*
105+
* @param logger the logger name, usually class name as string.
106+
* @return true if the logger uses Log4J implementation.
107+
*/
108+
public static boolean isLog4jLogger(String logger) {
109+
if (logger == null || !IS_LOG4J_LOGGER.get()) {
110+
return false;
111+
}
112+
Logger log = LoggerFactory.getLogger(logger);
91113
try {
92-
Class log4jClass = Class.forName("org.slf4j.impl.Log4jLoggerAdapter");
114+
Class<?> log4jClass = Class.forName(SLF4J_LOG4J_ADAPTER_CLASS);
93115
return log4jClass.isInstance(log);
94116
} catch (ClassNotFoundException e) {
117+
IS_LOG4J_LOGGER.set(false);
95118
return false;
96119
}
97120
}
121+
98122
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ public void testGetClass() {
140140

141141
@Test
142142
public void testIsLog4jLogger() throws Exception {
143-	assertFalse("False if clazz is null", GenericsUtil.isLog4jLogger(null));
143+	assertFalse("False if clazz is null", GenericsUtil.isLog4jLogger((Class<?>) null));
144144
assertTrue("The implementation is Log4j",
145145
GenericsUtil.isLog4jLogger(TestGenericsUtil.class));
146146
}

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/NamenodeHeartbeatService.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -317,11 +317,8 @@ private void updateState() {
317317
if (!resolver.registerNamenode(report)) {
318318
LOG.warn("Cannot register namenode {}", report);
319319
}
320-	} catch (IOException e) {
321-	  LOG.info("Cannot register namenode in the State Store");
322-	} catch (Exception ex) {
323-	  LOG.error("Unhandled exception updating NN registration for {}",
324-	    getNamenodeDesc(), ex);
320+	} catch (Exception e) {
321+	  LOG.error("Cannot register namenode {} in the State Store", getNamenodeDesc(), e);
325322
}
326323
}
327324

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,7 @@
6363
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
6464
import org.apache.hadoop.hdfs.server.protocol.DisallowedDatanodeException;
6565
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
66+
import org.apache.hadoop.hdfs.server.protocol.InvalidBlockReportLeaseException;
6667
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
6768
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
6869
import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
@@ -791,6 +792,9 @@ private void offerService() throws Exception {
791792
shouldServiceRun = false;
792793
return;
793794
}
795+	if (InvalidBlockReportLeaseException.class.getName().equals(reClass)) {
796+	  fullBlockReportLeaseId = 0;
797+	}
794798
LOG.warn("RemoteException in offerService", re);
795799
sleepAfterException();
796800
} catch (IOException e) {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -172,6 +172,7 @@
172172
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
173173
import org.apache.hadoop.hdfs.server.protocol.FinalizeCommand;
174174
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
175+
import org.apache.hadoop.hdfs.server.protocol.InvalidBlockReportLeaseException;
175176
import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
176177
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
177178
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
@@ -1651,6 +1652,8 @@ public DatanodeCommand blockReport(final DatanodeRegistration nodeReg,
16511652
bm.processReport(nodeReg, reports[index].getStorage(),
16521653
blocks, context));
16531654
}
1655+	} else {
1656+	  throw new InvalidBlockReportLeaseException(context.getReportId(), context.getLeaseId());
16541657
}
16551658
} catch (UnregisteredNodeException une) {
16561659
LOG.warn("Datanode {} is attempting to report but not register yet.",

0 commit comments

Comments (0)