Skip to content

Commit 1a49991

Browse files
authored
HDDS-11617. Update hadoop to 3.4.1 (apache#7376)
1 parent 9945de6 commit 1a49991

11 files changed

Lines changed: 79 additions & 32 deletions

File tree

hadoop-hdds/interface-client/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
4141
</dependency>
4242
<dependency>
4343
<groupId>org.apache.hadoop.thirdparty</groupId>
44-
<artifactId>hadoop-shaded-protobuf_3_7</artifactId>
44+
<artifactId>hadoop-shaded-protobuf_3_25</artifactId>
4545
</dependency>
4646
<dependency>
4747
<groupId>org.apache.ratis</groupId>

hadoop-hdds/test-utils/src/main/java/org/apache/ozone/test/MetricsAsserts.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,8 @@
5151
* Copied from Hadoop and migrated to AssertJ.
5252
*/
5353
public final class MetricsAsserts {
54-
54+
// workaround for HADOOP-19301.
55+
private static final MutableQuantiles QUANTILES = new MutableQuantiles();
5556
private static final Logger LOG = LoggerFactory.getLogger(MetricsAsserts.class);
5657
private static final Offset<Double> EPSILON = Offset.offset(0.00001);
5758
private static final Offset<Float> EPSILON_FLOAT = Offset.offset(0.00001f);
@@ -411,7 +412,7 @@ public static void assertQuantileGauges(String prefix,
411412
public static void assertQuantileGauges(String prefix,
412413
MetricsRecordBuilder rb, String valueName) {
413414
verify(rb).addGauge(eqName(info(prefix + "NumOps", "")), geq(0L));
414-
for (Quantile q : MutableQuantiles.quantiles) {
415+
for (Quantile q : QUANTILES.getQuantiles()) {
415416
String nameTemplate = prefix + "%dthPercentile" + valueName;
416417
int percentile = (int) (100 * q.quantile);
417418
verify(rb).addGauge(
@@ -432,7 +433,7 @@ public static void assertQuantileGauges(String prefix,
432433
public static void assertInverseQuantileGauges(String prefix,
433434
MetricsRecordBuilder rb, String valueName) {
434435
verify(rb).addGauge(eqName(info(prefix + "NumOps", "")), geq(0L));
435-
for (Quantile q : MutableQuantiles.quantiles) {
436+
for (Quantile q : QUANTILES.getQuantiles()) {
436437
String nameTemplate = prefix + "%dthInversePercentile" + valueName;
437438
int percentile = (int) (100 * q.quantile);
438439
verify(rb).addGauge(

hadoop-ozone/dist/src/main/compose/common/s3a-test.sh

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,10 +93,13 @@ execute_s3a_tests() {
9393
EOF
9494

9595
# Some tests are skipped due to known issues.
96+
# - ITestS3AContractBulkDelete: HDDS-11661
97+
# - ITestS3AContractCreate: HDDS-11663
9698
# - ITestS3AContractDistCp: HDDS-10616
99+
# - ITestS3AContractMkdirWithCreatePerf: HDDS-11662
97100
# - ITestS3AContractRename: HDDS-10665
98101
mvn -B -V --fail-never --no-transfer-progress \
99-
-Dtest='ITestS3AContract*, ITestS3ACommitterMRJob, !ITestS3AContractDistCp, !ITestS3AContractRename' \
102+
-Dtest='ITestS3AContract*, ITestS3ACommitterMRJob, !ITestS3AContractBulkDelete, !ITestS3AContractCreate#testOverwrite*EmptyDirectory[*], !ITestS3AContractDistCp, !ITestS3AContractMkdirWithCreatePerf, !ITestS3AContractRename' \
100103
clean test
101104

102105
local target="${RESULT_DIR}/junit/${bucket}/target"

hadoop-ozone/dist/src/main/license/jar-report.txt

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ share/ozone/lib/hadoop-common.jar
6363
share/ozone/lib/hadoop-hdfs-client.jar
6464
share/ozone/lib/hadoop-hdfs.jar
6565
share/ozone/lib/hadoop-shaded-guava.jar
66-
share/ozone/lib/hadoop-shaded-protobuf_3_7.jar
66+
share/ozone/lib/hadoop-shaded-protobuf_3_25.jar
6767
share/ozone/lib/hdds-annotation-processing.jar
6868
share/ozone/lib/hdds-client.jar
6969
share/ozone/lib/hdds-common.jar
@@ -134,6 +134,7 @@ share/ozone/lib/jersey-hk2.jar
134134
share/ozone/lib/jersey-media-jaxb.jar
135135
share/ozone/lib/jersey-media-json-jackson.jar
136136
share/ozone/lib/jersey-server.jar
137+
share/ozone/lib/jettison.jar
137138
share/ozone/lib/jetty-client.jar
138139
share/ozone/lib/jetty-http.jar
139140
share/ozone/lib/jetty-io.jar
@@ -202,6 +203,7 @@ share/ozone/lib/netty-tcnative-classes.Final.jar
202203
share/ozone/lib/netty-transport.Final.jar
203204
share/ozone/lib/netty-transport-classes-epoll.Final.jar
204205
share/ozone/lib/netty-transport-native-epoll.Final-linux-x86_64.jar
206+
share/ozone/lib/netty-transport-native-epoll.Final.jar
205207
share/ozone/lib/netty-transport-native-unix-common.Final.jar
206208
share/ozone/lib/nimbus-jose-jwt.jar
207209
share/ozone/lib/okhttp.jar

hadoop-ozone/dist/src/shell/conf/log4j.properties

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ hadoop.log.dir=.
2020
hadoop.log.file=hadoop.log
2121

2222
# Define the root logger to the system property "hadoop.root.logger".
23-
log4j.rootLogger=${hadoop.root.logger}, EventCounter
23+
log4j.rootLogger=${hadoop.root.logger}
2424

2525
# Logging Threshold
2626
log4j.threshold=ALL
@@ -129,13 +129,6 @@ log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
129129
log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
130130
#log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
131131

132-
#
133-
# Event Counter Appender
134-
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
135-
#
136-
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
137-
138-
139132
# Log levels of third-party libraries
140133
log4j.logger.org.apache.commons.beanutils=WARN
141134

hadoop-ozone/integration-test/pom.xml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -263,6 +263,11 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
263263
<groupId>org.slf4j</groupId>
264264
<artifactId>jul-to-slf4j</artifactId>
265265
</dependency>
266+
<dependency>
267+
<groupId>org.assertj</groupId>
268+
<artifactId>assertj-core</artifactId>
269+
<version>${assertj.version}</version>
270+
</dependency>
266271
</dependencies>
267272

268273
<build>

hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -153,6 +153,14 @@ static void runContainerStateMachineMetrics(
153153
assertCounter("NumContainerNotOpenVerifyFailures", 0L, metric);
154154
assertCounter("WriteChunkMsNumOps", 1L, metric);
155155

156+
applyTransactionLatency = getDoubleGauge(
157+
"ApplyTransactionNsAvgTime", metric);
158+
assertThat(applyTransactionLatency).isGreaterThan(0.0);
159+
writeStateMachineLatency = getDoubleGauge(
160+
"WriteStateMachineDataNsAvgTime", metric);
161+
assertThat(writeStateMachineLatency).isGreaterThan(0.0);
162+
163+
156164
//Read Chunk
157165
ContainerProtos.ContainerCommandRequestProto readChunkRequest =
158166
ContainerTestHelper.getReadChunkRequest(pipeline, writeChunkRequest
@@ -165,12 +173,6 @@ static void runContainerStateMachineMetrics(
165173
RaftGroupId.valueOf(pipeline.getId().getId()));
166174
assertCounter("NumQueryStateMachineOps", 1L, metric);
167175
assertCounter("NumApplyTransactionOps", 1L, metric);
168-
applyTransactionLatency = getDoubleGauge(
169-
"ApplyTransactionNsAvgTime", metric);
170-
assertThat(applyTransactionLatency).isGreaterThan(0.0);
171-
writeStateMachineLatency = getDoubleGauge(
172-
"WriteStateMachineDataNsAvgTime", metric);
173-
assertThat(writeStateMachineLatency).isGreaterThan(0.0);
174176

175177
} finally {
176178
if (client != null) {

hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java

Lines changed: 41 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,9 +29,11 @@
2929
import static org.assertj.core.api.Assertions.assertThat;
3030

3131
import java.io.IOException;
32+
import java.util.Arrays;
3233
import java.util.Collections;
3334
import java.util.HashMap;
3435
import java.util.Map;
36+
import java.util.stream.Collectors;
3537

3638
import org.apache.hadoop.conf.Configuration;
3739
import org.apache.hadoop.fs.FileStatus;
@@ -52,9 +54,10 @@
5254
import org.apache.hadoop.tools.DistCpOptions;
5355
import org.apache.hadoop.tools.SimpleCopyListing;
5456
import org.apache.hadoop.tools.mapred.CopyMapper;
55-
import org.apache.hadoop.tools.util.DistCpTestUtils;
57+
import org.apache.hadoop.util.ToolRunner;
5658
import org.apache.hadoop.util.functional.RemoteIterators;
5759

60+
import org.assertj.core.api.Assertions;
5861
import org.junit.jupiter.api.AfterEach;
5962
import org.junit.jupiter.api.BeforeEach;
6063
import org.junit.jupiter.api.Test;
@@ -534,8 +537,7 @@ public void testLargeFilesFromRemote() throws Exception {
534537
public void testSetJobId() throws Exception {
535538
describe("check jobId is set in the conf");
536539
remoteFS.create(new Path(remoteDir, "file1")).close();
537-
DistCpTestUtils
538-
.assertRunDistCp(DistCpConstants.SUCCESS, remoteDir.toString(),
540+
assertRunDistCp(DistCpConstants.SUCCESS, remoteDir.toString(),
539541
localDir.toString(), getDefaultCLIOptionsOrNull(), conf);
540542
assertThat(conf.get(CONF_LABEL_DISTCP_JOB_ID))
541543
.withFailMessage("DistCp job id isn't set")
@@ -719,7 +721,7 @@ public void testDistCpWithIterator() throws Exception {
719721
GenericTestUtils.LogCapturer.captureLogs(SimpleCopyListing.LOG);
720722

721723
String options = "-useiterator -update -delete" + getDefaultCLIOptions();
722-
DistCpTestUtils.assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
724+
assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
723725
dest.toString(), options, conf);
724726

725727
// Check the target listing was also done using iterator.
@@ -864,7 +866,7 @@ public void testDistCpWithFile() throws Exception {
864866
verifyPathExists(remoteFS, "", source);
865867
verifyPathExists(localFS, "", localDir);
866868

867-
DistCpTestUtils.assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
869+
assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
868870
dest.toString(), getDefaultCLIOptionsOrNull(), conf);
869871

870872
assertThat(RemoteIterators.toList(localFS.listFiles(dest, true)))
@@ -889,7 +891,7 @@ public void testDistCpWithUpdateExistFile() throws Exception {
889891

890892
verifyPathExists(remoteFS, "", source);
891893
verifyPathExists(localFS, "", dest);
892-
DistCpTestUtils.assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
894+
assertRunDistCp(DistCpConstants.SUCCESS, source.toString(),
893895
dest.toString(), "-delete -update" + getDefaultCLIOptions(), conf);
894896

895897
assertThat(RemoteIterators.toList(localFS.listFiles(dest, true)))
@@ -1015,4 +1017,37 @@ private void verifySkipAndCopyCounter(Job job,
10151017
.withFailMessage("Mismatch in SKIP counter value")
10161018
.isEqualTo(skipExpectedValue);
10171019
}
1020+
1021+
/**
1022+
* Runs distcp from src to dst, preserving XAttrs. Asserts the
1023+
* expected exit code.
1024+
*
1025+
* @param exitCode expected exit code
1026+
* @param src distcp src path
1027+
* @param dst distcp destination
1028+
* @param options distcp command line options
1029+
* @param conf Configuration to use
1030+
* @throws Exception if there is any error
1031+
*/
1032+
public static void assertRunDistCp(int exitCode, String src, String dst,
1033+
String options, Configuration conf)
1034+
throws Exception {
1035+
assertRunDistCp(exitCode, src, dst,
1036+
options == null ? new String[0] : options.trim().split(" "), conf);
1037+
}
1038+
1039+
private static void assertRunDistCp(int exitCode, String src, String dst,
1040+
String[] options, Configuration conf)
1041+
throws Exception {
1042+
DistCp distCp = new DistCp(conf, null);
1043+
String[] optsArr = new String[options.length + 2];
1044+
System.arraycopy(options, 0, optsArr, 0, options.length);
1045+
optsArr[optsArr.length - 2] = src;
1046+
optsArr[optsArr.length - 1] = dst;
1047+
1048+
Assertions.assertThat(ToolRunner.run(conf, distCp, optsArr))
1049+
.describedAs("Exit code of distcp %s",
1050+
Arrays.stream(optsArr).collect(Collectors.joining(" ")))
1051+
.isEqualTo(exitCode);
1052+
}
10181053
}

hadoop-ozone/interface-client/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
4949

5050
<dependency>
5151
<groupId>org.apache.hadoop.thirdparty</groupId>
52-
<artifactId>hadoop-shaded-protobuf_3_7</artifactId>
52+
<artifactId>hadoop-shaded-protobuf_3_25</artifactId>
5353
</dependency>
5454

5555
<dependency>

hadoop-ozone/ozonefs-shaded/pom.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,12 @@
7878
</exclusion>
7979
</exclusions>
8080
</dependency>
81+
<dependency>
82+
<groupId>com.google.protobuf</groupId>
83+
<artifactId>protobuf-java</artifactId>
84+
<version>2.5.0</version>
85+
<scope>compile</scope>
86+
</dependency>
8187
</dependencies>
8288
<build>
8389
<plugins>

0 commit comments

Comments (0)