
Commit 4e87b7d

Merge branch 'apache:trunk' into YARN-11011
2 parents 69cfc27 + f1ce273

20 files changed: 273 additions & 56 deletions

LICENSE-binary

Lines changed: 14 additions & 14 deletions
@@ -339,20 +339,20 @@ org.apache.solr:solr-solrj:8.11.2
 org.apache.yetus:audience-annotations:0.5.0
 org.apache.zookeeper:zookeeper:3.7.2
 org.codehaus.jettison:jettison:1.5.4
-org.eclipse.jetty:jetty-annotations:9.4.51.v20230217
-org.eclipse.jetty:jetty-http:9.4.51.v20230217
-org.eclipse.jetty:jetty-io:9.4.51.v20230217
-org.eclipse.jetty:jetty-jndi:9.4.51.v20230217
-org.eclipse.jetty:jetty-plus:9.4.51.v20230217
-org.eclipse.jetty:jetty-security:9.4.51.v20230217
-org.eclipse.jetty:jetty-server:9.4.51.v20230217
-org.eclipse.jetty:jetty-servlet:9.4.51.v20230217
-org.eclipse.jetty:jetty-util:9.4.51.v20230217
-org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217
-org.eclipse.jetty:jetty-webapp:9.4.51.v20230217
-org.eclipse.jetty:jetty-xml:9.4.51.v20230217
-org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.51.v20230217
-org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217
+org.eclipse.jetty:jetty-annotations:9.4.53.v20231009
+org.eclipse.jetty:jetty-http:9.4.53.v20231009
+org.eclipse.jetty:jetty-io:9.4.53.v20231009
+org.eclipse.jetty:jetty-jndi:9.4.53.v20231009
+org.eclipse.jetty:jetty-plus:9.4.53.v20231009
+org.eclipse.jetty:jetty-security:9.4.53.v20231009
+org.eclipse.jetty:jetty-server:9.4.53.v20231009
+org.eclipse.jetty:jetty-servlet:9.4.53.v20231009
+org.eclipse.jetty:jetty-util:9.4.53.v20231009
+org.eclipse.jetty:jetty-util-ajax:9.4.53.v20231009
+org.eclipse.jetty:jetty-webapp:9.4.53.v20231009
+org.eclipse.jetty:jetty-xml:9.4.53.v20231009
+org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.53.v20231009
+org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.53.v20231009
 org.ehcache:ehcache:3.3.1
 org.ini4j:ini4j:0.5.4
 org.lz4:lz4-java:1.7.1

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java

Lines changed: 1 addition & 0 deletions
@@ -660,6 +660,7 @@ public Void run() throws Exception {
   private void doGracefulFailover()
       throws ServiceFailedException, IOException, InterruptedException {
     int timeout = FailoverController.getGracefulFenceTimeout(conf) * 2;
+    Preconditions.checkArgument(timeout >= 0, "timeout should be non-negative.");

     // Phase 1: pre-flight checks
     checkEligibleForFailover();
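
Note: one plausible motive for the new Preconditions guard (an inference, not stated in the patch) is that the graceful-fence timeout is doubled before use, so an extreme configured value can overflow int and turn negative. A minimal standalone Java sketch of that failure mode and the equivalent check, using hypothetical values:

public class TimeoutOverflowSketch {
  public static void main(String[] args) {
    // Hypothetical misconfiguration: an enormous graceful-fence timeout.
    int configuredTimeout = Integer.MAX_VALUE / 2 + 1;
    // Doubling it overflows int and wraps around to a negative value.
    int timeout = configuredTimeout * 2;
    // Equivalent of the Preconditions.checkArgument added by the patch:
    // fail fast instead of passing a negative timeout to failover logic.
    if (timeout < 0) {
      throw new IllegalArgumentException("timeout should be non-negative.");
    }
  }
}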

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java

Lines changed: 1 addition & 1 deletion
@@ -257,7 +257,7 @@ public void testOfflineImageViewer() throws Exception {
     FSImageTestUtil.getFSImage(
         cluster.getNameNode()).getStorage().getStorageDir(0));
     assertNotNull("Didn't generate or can't find fsimage", originalFsimage);
-    PrintStream o = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
+    PrintStream o = new PrintStream(NullOutputStream.INSTANCE);
     PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o);
     v.visit(new RandomAccessFile(originalFsimage, "r"));
   }

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

Lines changed: 1 addition & 1 deletion
@@ -405,7 +405,7 @@ private static FileStatus pathToFileEntry(FileSystem hdfs, String file)
   @Test(expected = IOException.class)
   public void testTruncatedFSImage() throws IOException {
     File truncatedFile = new File(tempDir, "truncatedFsImage");
-    PrintStream output = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
+    PrintStream output = new PrintStream(NullOutputStream.INSTANCE);
     copyPartOfFile(originalFsimage, truncatedFile);
     try (RandomAccessFile r = new RandomAccessFile(truncatedFile, "r")) {
       new FileDistributionCalculator(new Configuration(), 0, 0, false, output)
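
Both test files make the same substitution: recent commons-io releases deprecate the NULL_OUTPUT_STREAM constant in favor of the INSTANCE singleton. A minimal sketch of the replacement idiom, assuming commons-io 2.7+ on the classpath:

import java.io.PrintStream;

import org.apache.commons.io.output.NullOutputStream;

public class DiscardOutputSketch {
  public static void main(String[] args) {
    // Everything written to NullOutputStream.INSTANCE is silently dropped,
    // which is exactly what these image-viewer tests want for their output.
    try (PrintStream sink = new PrintStream(NullOutputStream.INSTANCE)) {
      sink.println("this line goes nowhere");
    }
  }
}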

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java

Lines changed: 28 additions & 3 deletions
@@ -955,7 +955,10 @@ public static class MapOutputBuffer<K extends Object, V extends Object>
       new ArrayList<SpillRecord>();
     private int totalIndexCacheMemory;
     private int indexCacheMemoryLimit;
+    private int spillFilesCountLimit;
     private static final int INDEX_CACHE_MEMORY_LIMIT_DEFAULT = 1024 * 1024;
+    private static final int SPILL_FILES_COUNT_LIMIT_DEFAULT = -1;
+    private static final int SPILL_FILES_COUNT_UNBOUNDED_LIMIT_VALUE = -1;

     private MapTask mapTask;
     private MapOutputFile mapOutputFile;
@@ -984,10 +987,17 @@ public void init(MapOutputCollector.Context context
           MRJobConfig.DEFAULT_IO_SORT_MB);
       indexCacheMemoryLimit = job.getInt(JobContext.INDEX_CACHE_MEMORY_LIMIT,
           INDEX_CACHE_MEMORY_LIMIT_DEFAULT);
+      spillFilesCountLimit = job.getInt(JobContext.SPILL_FILES_COUNT_LIMIT,
+          SPILL_FILES_COUNT_LIMIT_DEFAULT);
       if (spillper > (float)1.0 || spillper <= (float)0.0) {
         throw new IOException("Invalid \"" + JobContext.MAP_SORT_SPILL_PERCENT +
             "\": " + spillper);
       }
+      if(spillFilesCountLimit != SPILL_FILES_COUNT_UNBOUNDED_LIMIT_VALUE
+          && spillFilesCountLimit < 0) {
+        throw new IOException("Invalid value for \"" + JobContext.SPILL_FILES_COUNT_LIMIT + "\", " +
+            "current value: " + spillFilesCountLimit);
+      }
       if ((sortmb & 0x7FF) != sortmb) {
         throw new IOException(
             "Invalid \"" + JobContext.IO_SORT_MB + "\": " + sortmb);
@@ -1698,7 +1708,7 @@ private void sortAndSpill() throws IOException, ClassNotFoundException,
             spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH;
         }
         LOG.info("Finished spill " + numSpills);
-        ++numSpills;
+        incrementNumSpills();
       } finally {
         if (out != null) out.close();
         if (partitionOut != null) {
@@ -1774,7 +1784,7 @@ private void spillSingleRecord(final K key, final V value,
           totalIndexCacheMemory +=
             spillRec.size() * MAP_OUTPUT_INDEX_RECORD_LENGTH;
         }
-        ++numSpills;
+        incrementNumSpills();
       } finally {
         if (out != null) out.close();
         if (partitionOut != null) {
@@ -2022,14 +2032,29 @@ private void sameVolRename(Path srcPath,
       if (!dst.getParentFile().exists()) {
         if (!dst.getParentFile().mkdirs()) {
           throw new IOException("Unable to rename " + src + " to "
-            + dst + ": couldn't create parent directory");
+              + dst + ": couldn't create parent directory");
         }
       }

       if (!src.renameTo(dst)) {
         throw new IOException("Unable to rename " + src + " to " + dst);
       }
     }
+
+    /**
+     * Increments numSpills local counter by taking into consideration
+     * the max limit on spill files being generated by the job.
+     * If limit is reached, this function throws an IOException
+     */
+    private void incrementNumSpills() throws IOException {
+      ++numSpills;
+      if(spillFilesCountLimit != SPILL_FILES_COUNT_UNBOUNDED_LIMIT_VALUE
+          && numSpills > spillFilesCountLimit) {
+        throw new IOException("Too many spill files got created, control it with " +
+            "mapreduce.task.spill.files.count.limit, current value: " + spillFilesCountLimit +
+            ", current spill count: " + numSpills);
+      }
+    }
   } // MapOutputBuffer

   /**
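
The heart of this change is incrementNumSpills(): every spill path now goes through a single guarded counter, so a task that keeps spilling fails fast instead of exhausting local disk. A distilled standalone sketch of that guard, simplified from the patch (the real method lives inside MapOutputBuffer and reads its limit from JobContext constants):

import java.io.IOException;

public class SpillLimitSketch {
  private static final int UNBOUNDED = -1;
  private final int spillFilesCountLimit;
  private int numSpills = 0;

  public SpillLimitSketch(int limit) {
    this.spillFilesCountLimit = limit;
  }

  // Mirrors the patch: bump the counter, then fail with an IOException
  // once the configured limit is exceeded (-1 means no limit at all).
  void incrementNumSpills() throws IOException {
    ++numSpills;
    if (spillFilesCountLimit != UNBOUNDED && numSpills > spillFilesCountLimit) {
      throw new IOException("Too many spill files, limit: " + spillFilesCountLimit
          + ", current spill count: " + numSpills);
    }
  }

  public static void main(String[] args) throws IOException {
    SpillLimitSketch sketch = new SpillLimitSketch(2);
    sketch.incrementNumSpills(); // 1st spill: fine
    sketch.incrementNumSpills(); // 2nd spill: fine
    sketch.incrementNumSpills(); // 3rd spill: throws IOException
  }
}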

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java

Lines changed: 1 addition & 0 deletions
@@ -323,6 +323,7 @@ public interface MRJobConfig {
   public static final int DEFAULT_IO_SORT_MB = 100;

   public static final String INDEX_CACHE_MEMORY_LIMIT = "mapreduce.task.index.cache.limit.bytes";
+  String SPILL_FILES_COUNT_LIMIT = "mapreduce.task.spill.files.count.limit";

   public static final String PRESERVE_FAILED_TASK_FILES = "mapreduce.task.files.preserve.failedtasks";
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml

Lines changed: 9 additions & 0 deletions
@@ -62,6 +62,15 @@
     set to less than .5</description>
 </property>

+<property>
+  <name>mapreduce.task.spill.files.count.limit</name>
+  <value>-1</value>
+  <description>Number of spill files that can be created by a MapTask.
+  After breaching this, task will fail. Default value for this config is -1
+  which indicates that there is no limit on number of spill files being
+  created</description>
+</property>
+
 <property>
   <name>mapreduce.job.local-fs.single-disk-limit.bytes</name>
   <value>-1</value>
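
A job opts in by setting the property on its configuration (or on the command line via the generic -D option, e.g. -Dmapreduce.task.spill.files.count.limit=100). A short usage sketch using the MRJobConfig constant introduced above:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class SpillLimitConfigSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Cap each MapTask at 100 spill files; breaching the cap fails the task.
    conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, 100);
    // Leaving the property at its default of -1 keeps spills unbounded.
  }
}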

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java

Lines changed: 78 additions & 0 deletions
@@ -27,14 +27,20 @@
 import org.apache.hadoop.mapred.MapTask.MapOutputBuffer;
 import org.apache.hadoop.mapred.Task.TaskReporter;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.Progress;
 import org.junit.After;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;

 import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;

 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doReturn;
@@ -51,6 +57,9 @@ public void cleanup() throws Exception {
     FileUtil.fullyDelete(TEST_ROOT_DIR);
   }

+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
   // Verify output files for shuffle have group read permission even when
   // the configured umask normally would prevent it.
   @Test
@@ -84,4 +93,73 @@ public void testShufflePermissions() throws Exception {
     Assert.assertEquals("Incorrect index file perms",
         (short)0640, perms.toShort());
   }
+
+  @Test
+  public void testSpillFilesCountLimitInvalidValue() throws Exception {
+    JobConf conf = new JobConf();
+    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
+    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
+    MapOutputFile mof = new MROutputFiles();
+    mof.setConf(conf);
+    TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
+    MapTask mockTask = mock(MapTask.class);
+    doReturn(mof).when(mockTask).getMapOutputFile();
+    doReturn(attemptId).when(mockTask).getTaskID();
+    doReturn(new Progress()).when(mockTask).getSortPhase();
+    TaskReporter mockReporter = mock(TaskReporter.class);
+    doReturn(new Counter()).when(mockReporter).getCounter(any(TaskCounter.class));
+    MapOutputCollector.Context ctx = new MapOutputCollector.Context(mockTask, conf, mockReporter);
+    MapOutputBuffer<Object, Object> mob = new MapOutputBuffer<>();
+
+    exception.expect(IOException.class);
+    exception.expectMessage("Invalid value for \"mapreduce.task.spill.files.count.limit\", " +
+        "current value: -2");
+
+    mob.init(ctx);
+    mob.close();
+  }
+
+  @Test
+  public void testSpillFilesCountBreach() throws Exception {
+    JobConf conf = new JobConf();
+    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
+    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, 2);
+    MapOutputFile mof = new MROutputFiles();
+    mof.setConf(conf);
+    TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
+    MapTask mockTask = mock(MapTask.class);
+    doReturn(mof).when(mockTask).getMapOutputFile();
+    doReturn(attemptId).when(mockTask).getTaskID();
+    doReturn(new Progress()).when(mockTask).getSortPhase();
+    TaskReporter mockReporter = mock(TaskReporter.class);
+    doReturn(new Counter()).when(mockReporter).getCounter(any(TaskCounter.class));
+    MapOutputCollector.Context ctx = new MapOutputCollector.Context(mockTask, conf, mockReporter);
+    MapOutputBuffer<Object, Object> mob = new MapOutputBuffer<>();
+    mob.numSpills = 2;
+    mob.init(ctx);
+
+    Method method = mob.getClass().getDeclaredMethod("incrementNumSpills");
+    method.setAccessible(true);
+    boolean gotExceptionWithMessage = false;
+    try {
+      method.invoke(mob);
+    } catch(InvocationTargetException e) {
+      Throwable targetException = e.getTargetException();
+      if (targetException != null) {
+        String errorMessage = targetException.getMessage();
+        if (errorMessage != null) {
+          if(errorMessage.equals("Too many spill files got created, control it with " +
+              "mapreduce.task.spill.files.count.limit, current value: 2, current spill count: 3")) {
+            gotExceptionWithMessage = true;
+          }
+        }
+      }
+    }
+
+    mob.close();
+
+    Assert.assertTrue(gotExceptionWithMessage);
+  }
 }

hadoop-project/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@
     <!--Whether to proceed to next module if any test failures exist-->
     <maven.test.failure.ignore>true</maven.test.failure.ignore>
     <maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
-    <jetty.version>9.4.51.v20230217</jetty.version>
+    <jetty.version>9.4.53.v20231009</jetty.version>
     <test.exclude>_</test.exclude>
     <test.exclude.pattern>_</test.exclude.pattern>
hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java

Lines changed: 5 additions & 1 deletion
@@ -54,6 +54,7 @@

 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -505,7 +506,10 @@ void generateScript(File localScript) throws IOException {
     String classpath = halrJarPath + File.pathSeparator + harJarPath;
     FileWriterWithEncoding fw = null;
     try {
-      fw = new FileWriterWithEncoding(localScript, "UTF-8");
+      fw = FileWriterWithEncoding.builder()
+          .setFile(localScript)
+          .setCharset(StandardCharsets.UTF_8)
+          .get();
       fw.write("#!/bin/bash\nset -e\nset -x\n");
       int containerCount = 1;
       for (AppInfo context : eligibleApplications) {
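
The replaced constructor is deprecated in recent commons-io releases in favor of the builder the patch adopts. A minimal sketch of the pattern, assuming commons-io 2.12+ (try-with-resources here for brevity; the patched method keeps its explicit try/finally):

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.output.FileWriterWithEncoding;

public class ScriptWriterSketch {
  public static void main(String[] args) throws IOException {
    File script = new File("archive-logs.sh"); // hypothetical output path
    try (FileWriterWithEncoding fw = FileWriterWithEncoding.builder()
        .setFile(script)
        .setCharset(StandardCharsets.UTF_8)
        .get()) {
      fw.write("#!/bin/bash\nset -e\nset -x\n");
    }
  }
}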
