Skip to content

Commit 33d151a

Browse files
author
wangzixuan.wzxuan
committed
fix
1 parent 1c877ea commit 33d151a

File tree

12 files changed

+18
-20
lines changed

12 files changed

+18
-20
lines changed

hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/execution/bulkinsert/RDDConsistentBucketPartitioner.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
import org.apache.hudi.common.config.SerializableSchema;
2323
import org.apache.hudi.common.fs.FSUtils;
2424
import org.apache.hudi.common.model.ConsistentHashingNode;
25-
import org.apache.hudi.common.model.HoodieAvroRecord;
2625
import org.apache.hudi.common.model.HoodieConsistentHashingMetadata;
2726
import org.apache.hudi.common.model.HoodieKey;
2827
import org.apache.hudi.common.model.HoodieRecord;
@@ -235,8 +234,8 @@ private JavaRDD<HoodieRecord<T>> doPartitionAndCustomColumnSort(JavaRDD<HoodieRe
235234
final String[] sortColumns = sortColumnNames;
236235
final SerializableSchema schema = new SerializableSchema(HoodieAvroUtils.addMetadataFields((new Schema.Parser().parse(table.getConfig().getSchema()))));
237236
Comparator<HoodieRecord<T>> comparator = (Comparator<HoodieRecord<T>> & Serializable) (t1, t2) -> {
238-
Object obj1 = HoodieAvroUtils.getRecordColumnValues((HoodieAvroRecord) t1, sortColumns, schema, consistentLogicalTimestampEnabled);
239-
Object obj2 = HoodieAvroUtils.getRecordColumnValues((HoodieAvroRecord)t2, sortColumns, schema, consistentLogicalTimestampEnabled);
237+
Object obj1 = t1.getRecordColumnValues(schema.get(), sortColumns, consistentLogicalTimestampEnabled);
238+
Object obj2 = t2.getRecordColumnValues(schema.get(), sortColumns, consistentLogicalTimestampEnabled);
240239
return ((Comparable) obj1).compareTo(obj2);
241240
};
242241

hudi-client/hudi-spark-client/src/main/java/org/apache/hudi/io/storage/HoodieSparkFileReader.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,6 @@ default ClosableIterator<HoodieRecord<InternalRow>> getRecordIterator(Schema rea
4444

4545
@Override
4646
default ClosableIterator<HoodieRecord<InternalRow>> getRecordIterator(Schema readerSchema, Schema requestedSchema) throws IOException {
47-
// TODO used in HoodieParquetDataBlock
48-
return getRecordIterator(readerSchema);
47+
return getRecordIterator(readerSchema, requestedSchema);
4948
}
5049
}

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/clustering/plan/strategy/TestSparkConsistentBucketClusteringPlanStrategy.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
import org.apache.hudi.common.testutils.HoodieTestUtils;
3030
import org.apache.hudi.common.util.collection.Triple;
3131
import org.apache.hudi.config.HoodieIndexConfig;
32-
import org.apache.hudi.config.HoodieStorageConfig;
32+
import org.apache.hudi.common.config.HoodieStorageConfig;
3333
import org.apache.hudi.config.HoodieWriteConfig;
3434
import org.apache.hudi.index.HoodieIndex;
3535
import org.apache.hudi.index.bucket.ConsistentBucketIdentifier;

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestSparkConsistentBucketClustering.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
import org.apache.hudi.config.HoodieClusteringConfig;
3737
import org.apache.hudi.config.HoodieCompactionConfig;
3838
import org.apache.hudi.config.HoodieIndexConfig;
39-
import org.apache.hudi.config.HoodieStorageConfig;
39+
import org.apache.hudi.common.config.HoodieStorageConfig;
4040
import org.apache.hudi.config.HoodieWriteConfig;
4141
import org.apache.hudi.exception.HoodieException;
4242
import org.apache.hudi.execution.bulkinsert.BulkInsertSortMode;

hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -102,10 +102,6 @@ public String getFieldName() {
102102
IntStream.range(0, HOODIE_META_COLUMNS.size()).mapToObj(idx -> Pair.of(HOODIE_META_COLUMNS.get(idx), idx))
103103
.collect(Collectors.toMap(Pair::getKey, Pair::getValue));
104104

105-
public static final Map<String, Integer> HOODIE_META_COLUMNS_NAME_TO_POS_WITH_OPERATION =
106-
IntStream.range(0, HOODIE_META_COLUMNS_WITH_OPERATION.size()).mapToObj(idx -> Pair.of(HOODIE_META_COLUMNS_WITH_OPERATION.get(idx), idx))
107-
.collect(Collectors.toMap(Pair::getKey, Pair::getValue));
108-
109105
public static int RECORD_KEY_META_FIELD_ORD = HOODIE_META_COLUMNS_NAME_TO_POS.get(RECORD_KEY_METADATA_FIELD);
110106
public static int PARTITION_PATH_META_FIELD_ORD = HOODIE_META_COLUMNS_NAME_TO_POS.get(PARTITION_PATH_METADATA_FIELD);
111107
public static int FILENAME_META_FIELD_ORD = HOODIE_META_COLUMNS_NAME_TO_POS.get(FILENAME_METADATA_FIELD);

hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieHFileDataBlock.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,14 @@
3333
import org.apache.hudi.avro.HoodieAvroUtils;
3434
import org.apache.hudi.common.fs.inline.InLineFSUtils;
3535
import org.apache.hudi.common.fs.inline.InLineFileSystem;
36+
import org.apache.hudi.common.model.HoodieAvroIndexedRecord;
3637
import org.apache.hudi.common.model.HoodieRecord;
38+
import org.apache.hudi.common.model.HoodieRecord.HoodieRecordType;
39+
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HeaderMetadataType;
40+
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockContentLocation;
41+
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockType;
3742
import org.apache.hudi.common.util.ClosableIterator;
43+
import org.apache.hudi.common.util.MappingIterator;
3844
import org.apache.hudi.common.util.Option;
3945
import org.apache.hudi.common.util.StringUtils;
4046
import org.apache.hudi.common.util.ValidationUtils;

hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieParquetDataBlock.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ protected <T> ClosableIterator<HoodieRecord<T>> readRecordsFromBlockPayload(Hood
151151
blockContentLoc.getBlockSize());
152152

153153
ClosableIterator<HoodieRecord<T>> iterator = HoodieFileReaderFactory.getReaderFactory(type).getFileReader(inlineConf, inlineLogFilePath, PARQUET)
154-
.getRecordIterator(readerSchema, readerSchema);
154+
.getRecordIterator(readerSchema);
155155
return iterator;
156156
}
157157

hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/sink/clustering/ClusteringOperator.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
import org.apache.hudi.common.util.Option;
3232
import org.apache.hudi.common.util.StringUtils;
3333
import org.apache.hudi.common.util.collection.Pair;
34-
import org.apache.hudi.config.HoodieStorageConfig;
34+
import org.apache.hudi.common.config.HoodieStorageConfig;
3535
import org.apache.hudi.config.HoodieWriteConfig;
3636
import org.apache.hudi.configuration.FlinkOptions;
3737
import org.apache.hudi.configuration.OptionsResolver;

hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/util/StreamerUtil.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,6 @@
4848
import org.apache.hudi.config.HoodieLockConfig;
4949
import org.apache.hudi.config.HoodieMemoryConfig;
5050
import org.apache.hudi.config.HoodiePayloadConfig;
51-
import org.apache.hudi.config.HoodieStorageConfig;
5251
import org.apache.hudi.config.HoodieWriteConfig;
5352
import org.apache.hudi.configuration.FlinkOptions;
5453
import org.apache.hudi.configuration.HadoopConfigurations;

hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ object HoodieOptionConfig {
7777
.build()
7878

7979
val SQL_MERGER_STRATEGY: HoodieSQLOption[String] = buildConf()
80-
.withSqlKey("mergerSTRATEGY")
80+
.withSqlKey("mergerStrategy")
8181
.withHoodieKey(DataSourceWriteOptions.MERGER_STRATEGY.key)
8282
.withTableConfigKey(HoodieTableConfig.MERGER_STRATEGY.key)
8383
.defaultValue(StringUtils.DEFAULT_MERGER_STRATEGY_UUID)

0 commit comments

Comments (0)