
Commit ac9ce85

[HUDI-4483] Fix checkstyle in integ-test module (apache#6523)
1 parent c50b634 commit ac9ce85

18 files changed, 41 additions and 73 deletions

hudi-integ-test/pom.xml

Lines changed: 0 additions & 1 deletion
@@ -445,7 +445,6 @@
     <dockerCompose.envFile>${project.basedir}/compose_env</dockerCompose.envFile>
     <dockerCompose.file>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark244.yml</dockerCompose.file>
     <docker.compose.skip>${skipITs}</docker.compose.skip>
-    <checkstyle.skip>true</checkstyle.skip>
     <main.basedir>${project.parent.basedir}</main.basedir>
   </properties>
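With the module-level <checkstyle.skip>true</checkstyle.skip> override removed, hudi-integ-test is linted like the rest of the project, which is what the source changes in the remaining files address. As a minimal, hypothetical illustration (the class below is invented for this note, not taken from the commit), an import that is never referenced, like those deleted in the next hunks, is exactly what checkstyle's unused-imports check would now flag:

// Hypothetical example: with checkstyle enabled for the module, an import
// that is never referenced in the file fails the unused-imports check.
import org.apache.hudi.config.HoodieWriteConfig;  // never used below -> violation

class ExampleWriter {
  public void run() {
    // the body never touches HoodieWriteConfig, so the import above is dead code
  }
}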

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/HoodieContinousTestSuiteWriter.java

Lines changed: 0 additions & 2 deletions
@@ -22,8 +22,6 @@
 import org.apache.hudi.common.model.HoodieRecord;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.Pair;
-import org.apache.hudi.config.HoodieWriteConfig;
-import org.apache.hudi.integ.testsuite.HoodieTestSuiteWriter;
 import org.apache.hudi.integ.testsuite.writer.DeltaWriteStats;
 import org.apache.hudi.utilities.schema.SchemaProvider;

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/HoodieInlineTestSuiteWriter.java

Lines changed: 0 additions & 8 deletions
@@ -19,10 +19,8 @@
 package org.apache.hudi.integ.testsuite;

 import org.apache.hudi.avro.model.HoodieCompactionPlan;
-import org.apache.hudi.client.HoodieReadClient;
 import org.apache.hudi.client.SparkRDDWriteClient;
 import org.apache.hudi.client.WriteStatus;
-import org.apache.hudi.client.common.HoodieSparkEngineContext;
 import org.apache.hudi.common.model.HoodieAvroRecord;
 import org.apache.hudi.common.model.HoodieCommitMetadata;
 import org.apache.hudi.common.model.HoodieRecord;

@@ -31,12 +29,7 @@
 import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.Pair;
-import org.apache.hudi.config.HoodieCompactionConfig;
-import org.apache.hudi.config.HoodieIndexConfig;
-import org.apache.hudi.config.HoodiePayloadConfig;
-import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.data.HoodieJavaRDD;
-import org.apache.hudi.index.HoodieIndex;
 import org.apache.hudi.integ.testsuite.HoodieTestSuiteJob.HoodieTestSuiteConfig;
 import org.apache.hudi.integ.testsuite.writer.DeltaWriteStats;
 import org.apache.hudi.table.HoodieSparkTable;

@@ -46,7 +39,6 @@

 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.rdd.RDD;

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/HoodieMultiWriterTestSuiteJob.java

Lines changed: 1 addition & 2 deletions
@@ -34,7 +34,6 @@
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;

@@ -134,7 +133,7 @@ public static void main(String[] args) throws Exception {
     AtomicBoolean jobFailed = new AtomicBoolean(false);
     AtomicInteger counter = new AtomicInteger(0);
     List<Long> waitTimes = new ArrayList<>();
-    for (int i = 0;i < jobIndex ;i++) {
+    for (int i = 0; i < jobIndex; i++) {
       if (i == 0) {
         waitTimes.add(0L);
       } else {

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/HoodieTestSuiteWriter.java

Lines changed: 2 additions & 2 deletions
@@ -116,7 +116,7 @@ private boolean allowWriteClientAccess(DagNode dagNode) {

   public abstract RDD<GenericRecord> getNextBatch() throws Exception;

-  public abstract Pair<SchemaProvider, Pair<String, JavaRDD<HoodieRecord>>> fetchSource() throws Exception ;
+  public abstract Pair<SchemaProvider, Pair<String, JavaRDD<HoodieRecord>>> fetchSource() throws Exception;

   public abstract Option<String> startCommit();

@@ -132,7 +132,7 @@ private boolean allowWriteClientAccess(DagNode dagNode) {

   public abstract JavaRDD<WriteStatus> compact(Option<String> instantTime) throws Exception;

-  public abstract void inlineClustering() throws Exception ;
+  public abstract void inlineClustering() throws Exception;

   public abstract Option<String> scheduleCompaction(Option<Map<String, String>> previousCommitExtraMetadata) throws Exception;

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/SparkDataSourceContinuousIngestTool.java

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@
 import org.apache.hudi.client.common.HoodieSparkEngineContext;
 import org.apache.hudi.common.config.TypedProperties;
 import org.apache.hudi.common.fs.FSUtils;
-import org.apache.hudi.integ.testsuite.SparkDataSourceContinuousIngest;
 import org.apache.hudi.utilities.HoodieRepairTool;
 import org.apache.hudi.utilities.IdentitySplitter;
 import org.apache.hudi.utilities.UtilHelpers;

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/configuration/DFSDeltaConfig.java

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ public DFSDeltaConfig(DeltaOutputMode deltaOutputMode, DeltaInputType deltaInput
                        SerializableConfiguration configuration,
                        String deltaBasePath, String targetBasePath, String schemaStr, Long maxFileSize,
                        int inputParallelism, boolean deleteOldInputData, boolean useHudiToGenerateUpdates) {
-    super(deltaOutputMode, deltaInputType, configuration);
+    super(deltaOutputMode, deltaInputType, configuration);
     this.deltaBasePath = deltaBasePath;
     this.schemaStr = schemaStr;
     this.maxFileSize = maxFileSize;

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/dag/DagUtils.java

Lines changed: 10 additions & 18 deletions
@@ -178,8 +178,7 @@ private static DagNode convertJsonToDagNode(JsonNode node, String type, String n
       DeltaConfig.Config config = DeltaConfig.Config.newBuilder().withConfigsMap(convertJsonNodeToMap(node))
           .withName(name).build();
       return (DagNode) ReflectionUtils.loadClass(generateFQN(type), config);
-    }
-    catch (ClassNotFoundException e) {
+    } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }
   }

@@ -231,8 +230,7 @@ private static List<Pair<String, Integer>> getQueries(Entry<String, JsonNode> en
       List<JsonNode> flattened = new ArrayList<>();
       flattened.add(entry.getValue());
       queries = (List<Pair<String, Integer>>) getQueryMapper().readValue(flattened.toString(), List.class);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
     }
     return queries;

@@ -244,8 +242,7 @@ private static List<String> getQuerySessionProperties(Entry<String, JsonNode> en
       List<JsonNode> flattened = new ArrayList<>();
       flattened.add(entry.getValue());
       properties = (List<String>) getQueryEnginePropertyMapper().readValue(flattened.toString(), List.class);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
     }
     return properties;

@@ -254,20 +251,15 @@ private static List<String> getQuerySessionProperties(Entry<String, JsonNode> en
   private static Object getValue(JsonNode node) {
     if (node.isInt()) {
       return node.asInt();
-    }
-    else if (node.isLong()) {
+    } else if (node.isLong()) {
       return node.asLong();
-    }
-    else if (node.isShort()) {
+    } else if (node.isShort()) {
       return node.asInt();
-    }
-    else if (node.isBoolean()) {
+    } else if (node.isBoolean()) {
       return node.asBoolean();
-    }
-    else if (node.isDouble()) {
+    } else if (node.isDouble()) {
       return node.asDouble();
-    }
-    else if (node.isFloat()) {
+    } else if (node.isFloat()) {
       return node.asDouble();
     }
     return node.textValue();

@@ -287,6 +279,7 @@ private static JsonNode createJsonNode(DagNode node, String type) throws IOExcep
       case HIVE_PROPERTIES:
         ((ObjectNode) configNode).put(HIVE_PROPERTIES,
             MAPPER.readTree(getQueryEnginePropertyMapper().writeValueAsString(node.getConfig().getHiveProperties())));
+        break;
       case PRESTO_QUERIES:
         ((ObjectNode) configNode).put(PRESTO_QUERIES,
             MAPPER.readTree(getQueryMapper().writeValueAsString(node.getConfig().getHiveQueries())));

@@ -376,8 +369,7 @@ public List deserialize(JsonParser parser, DeserializationContext context) throw

       if (fieldName.contains("query")) {
         query = parser.getValueAsString();
-      }
-      else if (fieldName.contains("result")) {
+      } else if (fieldName.contains("result")) {
         result = parser.getValueAsInt();
         pairs.add(Pair.of(query, result));
       }
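Most of the DagUtils.java changes fold a dangling catch or else onto the line of the preceding closing brace, the layout the project's checkstyle rules expect; the one change here that affects behavior is the break; added after the HIVE_PROPERTIES case, which previously fell through into the PRESTO_QUERIES case. A minimal sketch of the brace style these hunks converge on (the class and method below are illustrative, not copied from DagUtils):

import com.fasterxml.jackson.databind.JsonNode;

class BraceStyleSketch {
  // "} else if (...)" and "} catch (...)" share a line with the closing brace.
  static Object coerce(JsonNode node) {
    if (node.isInt()) {
      return node.asInt();
    } else if (node.isLong()) {
      return node.asLong();
    }
    return node.textValue();
  }
}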

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/dag/nodes/BaseQueryNode.java

Lines changed: 1 addition & 2 deletions
@@ -40,8 +40,7 @@ public void executeAndValidateQueries(List<Pair<String, Integer>> queriesWithRes
       if (!res.next()) {
         log.info("res.next() was False - typically this means the query returned no rows.");
         assert 0 == queryAndResult.getRight();
-      }
-      else {
+      } else {
         Integer result = res.getInt(1);
         if (!queryAndResult.getRight().equals(result)) {
           throw new AssertionError(

hudi-integ-test/src/main/java/org/apache/hudi/integ/testsuite/dag/nodes/BaseValidateDatasetNode.java

Lines changed: 12 additions & 12 deletions
@@ -88,8 +88,8 @@ public abstract Dataset<Row> getDatasetToValidate(SparkSession session, Executio
   public void execute(ExecutionContext context, int curItrCount) throws Exception {
     int validateOnceEveryItr = config.validateOnceEveryIteration();
     int itrCountToExecute = config.getIterationCountToExecute();
-    if ((itrCountToExecute != -1 && itrCountToExecute == curItrCount) ||
-        (itrCountToExecute == -1 && ((curItrCount % validateOnceEveryItr) == 0))) {
+    if ((itrCountToExecute != -1 && itrCountToExecute == curItrCount)
+        || (itrCountToExecute == -1 && ((curItrCount % validateOnceEveryItr) == 0))) {
       FileSystem fs = new Path(context.getHoodieTestSuiteWriter().getCfg().inputBasePath)
           .getFileSystem(context.getHoodieTestSuiteWriter().getConfiguration());
       if (context.getHoodieTestSuiteWriter().getCfg().testContinousMode) {

@@ -142,8 +142,8 @@ public void execute(ExecutionContext context, int curItrCount) throws Exception
       String tableName = context.getWriterContext().getProps().getString(DataSourceWriteOptions.HIVE_TABLE().key());
       log.warn("Validating hive table with db : " + database + " and table : " + tableName);
       session.sql("REFRESH TABLE " + database + "." + tableName);
-      Dataset<Row> cowDf = session.sql("SELECT _row_key, rider, driver, begin_lat, begin_lon, end_lat, end_lon, fare, _hoodie_is_deleted, " +
-          "test_suite_source_ordering_field FROM " + database + "." + tableName);
+      Dataset<Row> cowDf = session.sql("SELECT _row_key, rider, driver, begin_lat, begin_lon, end_lat, end_lon, fare, _hoodie_is_deleted, "
+          + "test_suite_source_ordering_field FROM " + database + "." + tableName);
       Dataset<Row> reorderedInputDf = inputSnapshotDf.select("_row_key", "rider", "driver", "begin_lat", "begin_lon", "end_lat", "end_lon", "fare",
           "_hoodie_is_deleted", "test_suite_source_ordering_field");

@@ -178,9 +178,9 @@ private void awaitUntilDeltaStreamerCaughtUp(ExecutionContext context, String hu
     FileStatus[] subDirs = fs.listStatus(new Path(inputPath));
     List<FileStatus> subDirList = Arrays.asList(subDirs);
     subDirList.sort(Comparator.comparingLong(entry -> Long.parseLong(entry.getPath().getName())));
-    String latestSubDir = subDirList.get(subDirList.size() -1).getPath().getName();
-    log.info("Latest sub directory in input path " + latestSubDir + ", latest checkpoint from deltastreamer " +
-        (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none"));
+    String latestSubDir = subDirList.get(subDirList.size() - 1).getPath().getName();
+    log.info("Latest sub directory in input path " + latestSubDir + ", latest checkpoint from deltastreamer "
+        + (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none"));
     long maxWaitTime = config.maxWaitTimeForDeltastreamerToCatchupMs();
     long waitedSoFar = 0;
     while (!(latestCheckpoint.isPresent() && latestCheckpoint.get().equals(latestSubDir))) {

@@ -191,11 +191,11 @@ private void awaitUntilDeltaStreamerCaughtUp(ExecutionContext context, String hu
       latestCheckpoint = getLatestCheckpoint(commitTimeline);
       waitedSoFar += 20000;
       if (waitedSoFar >= maxWaitTime) {
-        throw new AssertionError("DeltaStreamer has not caught up after 5 mins of wait time. Last known checkpoint " +
-            (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none") + ", expected checkpoint to have caugth up " + latestSubDir);
+        throw new AssertionError("DeltaStreamer has not caught up after 5 mins of wait time. Last known checkpoint "
+            + (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none") + ", expected checkpoint to have caugth up " + latestSubDir);
       }
-      log.info("Latest sub directory in input path " + latestSubDir + ", latest checkpoint from deltastreamer " +
-          (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none"));
+      log.info("Latest sub directory in input path " + latestSubDir + ", latest checkpoint from deltastreamer "
+          + (latestCheckpoint.isPresent() ? latestCheckpoint.get() : "none"));
     }
   }

@@ -223,7 +223,7 @@ private Dataset<Row> getInputDf(ExecutionContext context, SparkSession session,
     Dataset<Row> inputDf = session.read().format("avro").load(inputPath);
     Dataset<Row> trimmedDf = inputDf;
     if (!config.inputPartitonsToSkipWithValidate().isEmpty()) {
-      trimmedDf = inputDf.filter("instr("+partitionPathField+", \'"+ config.inputPartitonsToSkipWithValidate() +"\') != 1");
+      trimmedDf = inputDf.filter("instr(" + partitionPathField + ", \'" + config.inputPartitonsToSkipWithValidate() + "\') != 1");
     }

     ExpressionEncoder encoder = getEncoder(inputDf.schema());
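The BaseValidateDatasetNode.java edits apply the operator-wrap convention: when a boolean condition or string concatenation spans lines, the || or + begins the continuation line instead of trailing the previous one, and binary operators keep a space on both sides. A small illustrative fragment (the variables are placeholders, not taken from the class):

String latestSubDir = "20220829";
String checkpoint = "none";
// The concatenation operator starts the wrapped line, matching the operator-wrap
// style assumed here from the pattern of the changes above.
String msg = "Latest sub directory in input path " + latestSubDir
    + ", latest checkpoint from deltastreamer " + checkpoint;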
