Skip to content

Commit ad3f2c8

Browse files
authored
Merge branch 'apache:trunk' into YARN-11326
2 parents 2505955 + 734f7ab commit ad3f2c8

523 files changed

Lines changed: 18293 additions & 5247 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

.asf.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
# limitations under the License.
1515

1616
github:
17+
ghp_path: /
18+
ghp_branch: gh-pages
1719
enabled_merge_buttons:
1820
squash: true
1921
merge: false

.github/workflows/website.yml

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one or more
2+
# contributor license agreements. See the NOTICE file distributed with
3+
# this work for additional information regarding copyright ownership.
4+
# The ASF licenses this file to You under the Apache License, Version 2.0
5+
# (the "License"); you may not use this file except in compliance with
6+
# the License. You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
17+
name: website
18+
19+
# Controls when the action will run.
20+
on:
21+
push:
22+
branches: [ trunk ]
23+
24+
jobs:
25+
build:
26+
runs-on: ubuntu-latest
27+
steps:
28+
- name: Checkout Hadoop trunk
29+
uses: actions/checkout@v3
30+
with:
31+
repository: apache/hadoop
32+
- name: Set up JDK 8
33+
uses: actions/setup-java@v3
34+
with:
35+
java-version: '8'
36+
distribution: 'temurin'
37+
- name: Cache local Maven repository
38+
uses: actions/cache@v3
39+
with:
40+
path: ~/.m2/repository
41+
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
42+
restore-keys: |
43+
${{ runner.os }}-maven-
44+
- name: Build Hadoop maven plugins
45+
run: cd hadoop-maven-plugins && mvn --batch-mode install
46+
- name: Build Hadoop
47+
run: mvn clean install -DskipTests -DskipShade
48+
- name: Build document
49+
run: mvn clean site
50+
- name: Stage document
51+
run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
52+
- name: Deploy to GitHub Pages
53+
uses: peaceiris/actions-gh-pages@v3
54+
with:
55+
github_token: ${{ secrets.GITHUB_TOKEN }}
56+
publish_dir: ./staging/hadoop-project
57+
user_name: 'github-actions[bot]'
58+
user_email: 'github-actions[bot]@users.noreply.github.com'
59+

LICENSE-binary

Lines changed: 24 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -210,9 +210,9 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js
210210
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
211211
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/TimeoutFuture.java
212212

213-
com.aliyun:aliyun-java-sdk-core:3.4.0
214-
com.aliyun:aliyun-java-sdk-ecs:4.2.0
215-
com.aliyun:aliyun-java-sdk-ram:3.0.0
213+
com.aliyun:aliyun-java-sdk-core:4.5.10
214+
com.aliyun:aliyun-java-sdk-kms:2.11.0
215+
com.aliyun:aliyun-java-sdk-ram:3.1.0
216216
com.aliyun:aliyun-java-sdk-sts:3.0.0
217217
com.aliyun.oss:aliyun-sdk-oss:3.13.2
218218
com.amazonaws:aws-java-sdk-bundle:1.12.316
@@ -250,7 +250,6 @@ commons-codec:commons-codec:1.11
250250
commons-collections:commons-collections:3.2.2
251251
commons-daemon:commons-daemon:1.0.13
252252
commons-io:commons-io:2.8.0
253-
commons-logging:commons-logging:1.1.3
254253
commons-net:commons-net:3.9.0
255254
de.ruedigermoeller:fst:2.50
256255
io.grpc:grpc-api:1.26.0
@@ -260,7 +259,6 @@ io.grpc:grpc-netty:1.26.0
260259
io.grpc:grpc-protobuf:1.26.0
261260
io.grpc:grpc-protobuf-lite:1.26.0
262261
io.grpc:grpc-stub:1.26.0
263-
io.netty:netty:3.10.6.Final
264262
io.netty:netty-all:4.1.77.Final
265263
io.netty:netty-buffer:4.1.77.Final
266264
io.netty:netty-codec:4.1.77.Final
@@ -325,21 +323,21 @@ org.apache.htrace:htrace-core4:4.1.0-incubating
325323
org.apache.httpcomponents:httpclient:4.5.6
326324
org.apache.httpcomponents:httpcore:4.4.10
327325
org.apache.kafka:kafka-clients:2.8.2
328-
org.apache.kerby:kerb-admin:2.0.2
329-
org.apache.kerby:kerb-client:2.0.2
330-
org.apache.kerby:kerb-common:2.0.2
331-
org.apache.kerby:kerb-core:2.0.2
332-
org.apache.kerby:kerb-crypto:2.0.2
333-
org.apache.kerby:kerb-identity:2.0.2
334-
org.apache.kerby:kerb-server:2.0.2
335-
org.apache.kerby:kerb-simplekdc:2.0.2
336-
org.apache.kerby:kerb-util:2.0.2
337-
org.apache.kerby:kerby-asn1:2.0.2
338-
org.apache.kerby:kerby-config:2.0.2
339-
org.apache.kerby:kerby-pkix:2.0.2
340-
org.apache.kerby:kerby-util:2.0.2
341-
org.apache.kerby:kerby-xdr:2.0.2
342-
org.apache.kerby:token-provider:2.0.2
326+
org.apache.kerby:kerb-admin:2.0.3
327+
org.apache.kerby:kerb-client:2.0.3
328+
org.apache.kerby:kerb-common:2.0.3
329+
org.apache.kerby:kerb-core:2.0.3
330+
org.apache.kerby:kerb-crypto:2.0.3
331+
org.apache.kerby:kerb-identity:2.0.3
332+
org.apache.kerby:kerb-server:2.0.3
333+
org.apache.kerby:kerb-simplekdc:2.0.3
334+
org.apache.kerby:kerb-util:2.0.3
335+
org.apache.kerby:kerby-asn1:2.0.3
336+
org.apache.kerby:kerby-config:2.0.3
337+
org.apache.kerby:kerby-pkix:2.0.3
338+
org.apache.kerby:kerby-util:2.0.3
339+
org.apache.kerby:kerby-xdr:2.0.3
340+
org.apache.kerby:token-provider:2.0.3
343341
org.apache.solr:solr-solrj:8.8.2
344342
org.apache.yetus:audience-annotations:0.5.0
345343
org.apache.zookeeper:zookeeper:3.6.3
@@ -359,11 +357,14 @@ org.eclipse.jetty:jetty-xml:9.4.48.v20220622
359357
org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.48.v20220622
360358
org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.48.v20220622
361359
org.ehcache:ehcache:3.3.1
360+
org.ini4j:ini4j:0.5.4
361+
org.jetbrains.kotlin:kotlin-stdlib:1.4.10
362+
org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
362363
org.lz4:lz4-java:1.7.1
363364
org.objenesis:objenesis:2.6
364365
org.xerial.snappy:snappy-java:1.0.5
365366
org.yaml:snakeyaml:1.33
366-
org.wildfly.openssl:wildfly-openssl:1.0.7.Final
367+
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
367368

368369

369370
--------------------------------------------------------------------------------
@@ -518,6 +519,8 @@ Eclipse Public License 1.0
518519
--------------------------
519520

520521
junit:junit:4.13.2
522+
org.jacoco:org.jacoco.agent:0.8.5
523+
521524

522525

523526
HSQL License

hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ This filter must be configured in front of all the web application resources tha
2424

2525
The Hadoop Auth and dependent JAR files must be in the web application classpath (commonly the `WEB-INF/lib` directory).
2626

27-
Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application classpath as well as the Log4j configuration file.
27+
Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be added explicitly to the web application. For example, if the web application uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part of the web application classpath as well as the Log4j configuration file.
2828

2929
### Common Configuration parameters
3030

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,6 @@
180180
<artifactId>jersey-server</artifactId>
181181
<scope>compile</scope>
182182
</dependency>
183-
<dependency>
184-
<groupId>commons-logging</groupId>
185-
<artifactId>commons-logging</artifactId>
186-
<scope>compile</scope>
187-
</dependency>
188183
<dependency>
189184
<groupId>log4j</groupId>
190185
<artifactId>log4j</artifactId>
@@ -200,11 +195,6 @@
200195
<artifactId>assertj-core</artifactId>
201196
<scope>test</scope>
202197
</dependency>
203-
<dependency>
204-
<groupId>org.glassfish.grizzly</groupId>
205-
<artifactId>grizzly-http-servlet</artifactId>
206-
<scope>test</scope>
207-
</dependency>
208198
<dependency>
209199
<groupId>commons-beanutils</groupId>
210200
<artifactId>commons-beanutils</artifactId>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -241,12 +241,15 @@ public synchronized void close() throws IOException {
241241
return;
242242
}
243243
try {
244-
flush();
245-
if (closeOutputStream) {
246-
super.close();
247-
codec.close();
244+
try {
245+
flush();
246+
} finally {
247+
if (closeOutputStream) {
248+
super.close();
249+
codec.close();
250+
}
251+
freeBuffers();
248252
}
249-
freeBuffers();
250253
} finally {
251254
closed = true;
252255
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -414,7 +414,14 @@ public Path getLocalPathForWrite(String pathStr, long size,
414414

415415
//build the "roulette wheel"
416416
for(int i =0; i < ctx.dirDF.length; ++i) {
417-
availableOnDisk[i] = ctx.dirDF[i].getAvailable();
417+
final DF target = ctx.dirDF[i];
418+
// attempt to recreate the dir so that getAvailable() is valid
419+
// if it fails, getAvailable() will return 0, so the dir will
420+
// be declared unavailable.
421+
// return value is logged at debug to keep spotbugs quiet.
422+
final boolean b = new File(target.getDirPath()).mkdirs();
423+
LOG.debug("mkdirs of {}={}", target, b);
424+
availableOnDisk[i] = target.getAvailable();
418425
totalAvailable += availableOnDisk[i];
419426
}
420427

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/prefetch/CachingBlockManager.java

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -302,7 +302,12 @@ public void cancelPrefetches() {
302302

303303
private void read(BufferData data) throws IOException {
304304
synchronized (data) {
305-
readBlock(data, false, BufferData.State.BLANK);
305+
try {
306+
readBlock(data, false, BufferData.State.BLANK);
307+
} catch (IOException e) {
308+
LOG.error("error reading block {}", data.getBlockNumber(), e);
309+
throw e;
310+
}
306311
}
307312
}
308313

@@ -362,9 +367,6 @@ private void readBlock(BufferData data, boolean isPrefetch, BufferData.State...
362367
buffer.flip();
363368
data.setReady(expectedState);
364369
} catch (Exception e) {
365-
String message = String.format("error during readBlock(%s)", data.getBlockNumber());
366-
LOG.error(message, e);
367-
368370
if (isPrefetch && tracker != null) {
369371
tracker.failed();
370372
}
@@ -406,7 +408,8 @@ public Void get() {
406408
try {
407409
blockManager.prefetch(data, taskQueuedStartTime);
408410
} catch (Exception e) {
409-
LOG.error("error during prefetch", e);
411+
LOG.info("error prefetching block {}. {}", data.getBlockNumber(), e.getMessage());
412+
LOG.debug("error prefetching block {}", data.getBlockNumber(), e);
410413
}
411414
return null;
412415
}
@@ -493,7 +496,8 @@ private void addToCacheAndRelease(BufferData data, Future<Void> blockFuture,
493496
return;
494497
}
495498
} catch (Exception e) {
496-
LOG.error("error waiting on blockFuture: {}", data, e);
499+
LOG.info("error waiting on blockFuture: {}. {}", data, e.getMessage());
500+
LOG.debug("error waiting on blockFuture: {}", data, e);
497501
data.setDone();
498502
return;
499503
}
@@ -523,8 +527,8 @@ private void addToCacheAndRelease(BufferData data, Future<Void> blockFuture,
523527
data.setDone();
524528
} catch (Exception e) {
525529
numCachingErrors.incrementAndGet();
526-
String message = String.format("error adding block to cache after wait: %s", data);
527-
LOG.error(message, e);
530+
LOG.info("error adding block to cache after wait: {}. {}", data, e.getMessage());
531+
LOG.debug("error adding block to cache after wait: {}", data, e);
528532
data.setDone();
529533
}
530534

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/package-info.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,11 @@
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
18+
19+
/**
20+
* Filesystem implementations that allow Hadoop to read directly from
21+
* the local file system.
22+
*/
1823
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
1924
@InterfaceStability.Unstable
2025
package org.apache.hadoop.fs.local;

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/package-info.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,10 @@
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
18+
19+
/**
20+
* Support for the execution of a file system command.
21+
*/
1822
@InterfaceAudience.Private
1923
@InterfaceStability.Unstable
2024
package org.apache.hadoop.fs.shell;

0 commit comments

Comments
 (0)