
Commit 89bb32b

[HUDI-4071] Remove default value for mandatory record key field
1 parent 851c6e1 commit 89bb32b

15 files changed

Lines changed: 41 additions & 46 deletions


hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java

Lines changed: 2 additions & 2 deletions
@@ -676,10 +676,10 @@ private void validateBucketIndexConfig() {
     // check the bucket index hash field
     if (StringUtils.isNullOrEmpty(hoodieIndexConfig.getString(BUCKET_INDEX_HASH_FIELD))) {
       hoodieIndexConfig.setValue(BUCKET_INDEX_HASH_FIELD,
-          hoodieIndexConfig.getStringOrDefault(KeyGeneratorOptions.RECORDKEY_FIELD_NAME));
+          hoodieIndexConfig.getString(KeyGeneratorOptions.RECORDKEY_FIELD_NAME));
     } else {
       boolean valid = Arrays
-          .stream(hoodieIndexConfig.getStringOrDefault(KeyGeneratorOptions.RECORDKEY_FIELD_NAME).split(","))
+          .stream(hoodieIndexConfig.getString(KeyGeneratorOptions.RECORDKEY_FIELD_NAME).split(","))
           .collect(Collectors.toSet())
           .containsAll(Arrays.asList(hoodieIndexConfig.getString(BUCKET_INDEX_HASH_FIELD).split(",")));
       if (!valid) {
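
With the default gone, validateBucketIndexConfig() reads the record key via getString(), so a bucket index config only resolves its hash field if the record key has been set explicitly. A minimal sketch of the resulting call pattern (illustrative, not taken from this commit; the "uuid" column is a placeholder):

    import java.util.Properties;

    import org.apache.hudi.config.HoodieIndexConfig;
    import org.apache.hudi.index.HoodieIndex;
    import org.apache.hudi.keygen.constant.KeyGeneratorOptions;

    public class BucketIndexConfigSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        // The record key must now be supplied explicitly; there is no "uuid" fallback.
        props.setProperty(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid");

        HoodieIndexConfig indexConfig = HoodieIndexConfig.newBuilder()
            .fromProperties(props)
            .withIndexType(HoodieIndex.IndexType.BUCKET)
            .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.SIMPLE)
            .withBucketNum("8")
            .build();
        // BUCKET_INDEX_HASH_FIELD was not set, so validation copies the record key
        // field above into it, instead of the previous hard-coded "uuid" default.
      }
    }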

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/index/TestHoodieIndexConfigs.java

Lines changed: 6 additions & 2 deletions
@@ -31,6 +31,7 @@
 import org.apache.hudi.index.hbase.SparkHoodieHBaseIndex;
 import org.apache.hudi.index.inmemory.HoodieInMemoryHashIndex;
 import org.apache.hudi.index.simple.HoodieSimpleIndex;
+import org.apache.hudi.keygen.constant.KeyGeneratorOptions;

 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -39,6 +40,7 @@
 import org.junit.jupiter.params.provider.EnumSource;

 import java.nio.file.Path;
+import java.util.Properties;

 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -88,13 +90,15 @@ public void testCreateIndex(IndexType indexType) {
         assertTrue(SparkHoodieIndexFactory.createIndex(config) instanceof SparkHoodieHBaseIndex);
         break;
       case BUCKET:
+        Properties props = new Properties();
+        props.setProperty(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid");
         config = clientConfigBuilder.withPath(basePath)
-            .withIndexConfig(indexConfigBuilder.withIndexType(IndexType.BUCKET)
+            .withIndexConfig(indexConfigBuilder.fromProperties(props).withIndexType(IndexType.BUCKET)
                 .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.SIMPLE).build()).build();
         assertTrue(SparkHoodieIndexFactory.createIndex(config) instanceof HoodieSimpleBucketIndex);

         config = HoodieWriteConfig.newBuilder().withPath(basePath)
-            .withIndexConfig(indexConfigBuilder.withIndexType(IndexType.BUCKET)
+            .withIndexConfig(indexConfigBuilder.fromProperties(props).withIndexType(IndexType.BUCKET)
                 .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.CONSISTENT_HASHING).build())
             .build();
         assertTrue(SparkHoodieIndexFactory.createIndex(config) instanceof HoodieSparkConsistentBucketIndex);

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/index/bucket/TestHoodieSimpleBucketIndex.java

Lines changed: 1 addition & 0 deletions
@@ -82,6 +82,7 @@ public void testBucketIndexValidityCheck() {
           .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.SIMPLE)
           .withBucketNum("8").build();
     });
+    props.setProperty(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid");
     props.setProperty(HoodieIndexConfig.BUCKET_INDEX_HASH_FIELD.key(), "uuid");
     HoodieIndexConfig.newBuilder().fromProperties(props)
         .withIndexType(HoodieIndex.IndexType.BUCKET)

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/action/commit/TestCopyOnWriteActionExecutor.java

Lines changed: 1 addition & 2 deletions
@@ -145,18 +145,17 @@ private Properties makeIndexConfig(HoodieIndex.IndexType indexType) {
     Properties props = new Properties();
     HoodieIndexConfig.Builder indexConfig = HoodieIndexConfig.newBuilder()
         .withIndexType(indexType);
-    props.putAll(indexConfig.build().getProps());
     if (indexType.equals(HoodieIndex.IndexType.BUCKET)) {
       props.setProperty(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "_row_key");
       indexConfig.fromProperties(props)
           .withIndexKeyField("_row_key")
           .withBucketNum("1")
           .withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType.SIMPLE);
-      props.putAll(indexConfig.build().getProps());
       props.putAll(HoodieLayoutConfig.newBuilder().fromProperties(props)
           .withLayoutType(HoodieStorageLayout.LayoutType.BUCKET.name())
           .withLayoutPartitioner(SparkBucketIndexPartitioner.class.getName()).build().getProps());
     }
+    props.putAll(indexConfig.build().getProps());
     return props;
   }

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableCompaction.java

Lines changed: 4 additions & 6 deletions
@@ -86,6 +86,7 @@ public void setup() {

   @Test
   public void testWriteDuringCompaction() throws IOException {
+    Properties props = getPropertiesForKeyGen(true);
     HoodieWriteConfig config = HoodieWriteConfig.newBuilder()
         .forTable("test-trip-table")
         .withPath(basePath())
@@ -99,10 +100,8 @@ public void testWriteDuringCompaction() throws IOException {
         .withLayoutConfig(HoodieLayoutConfig.newBuilder()
             .withLayoutType(HoodieStorageLayout.LayoutType.BUCKET.name())
             .withLayoutPartitioner(SparkBucketIndexPartitioner.class.getName()).build())
-        .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BUCKET).withBucketNum("1").build())
+        .withIndexConfig(HoodieIndexConfig.newBuilder().fromProperties(props).withIndexType(HoodieIndex.IndexType.BUCKET).withBucketNum("1").build())
         .build();
-
-    Properties props = getPropertiesForKeyGen(true);
     props.putAll(config.getProps());

     metaClient = getHoodieMetaClient(HoodieTableType.MERGE_ON_READ, props);
@@ -131,6 +130,7 @@ public void testWriteDuringCompaction() throws IOException {
   @ParameterizedTest
   @MethodSource("writeLogTest")
   public void testWriteLogDuringCompaction(boolean enableMetadataTable, boolean enableTimelineServer) throws IOException {
+    Properties props = getPropertiesForKeyGen(true);
     HoodieWriteConfig config = HoodieWriteConfig.newBuilder()
         .forTable("test-trip-table")
         .withPath(basePath())
@@ -144,10 +144,8 @@ public void testWriteLogDuringCompaction(boolean enableMetadataTable, boolean en
         .withLayoutConfig(HoodieLayoutConfig.newBuilder()
             .withLayoutType(HoodieStorageLayout.LayoutType.BUCKET.name())
             .withLayoutPartitioner(SparkBucketIndexPartitioner.class.getName()).build())
-        .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BUCKET).withBucketNum("1").build())
+        .withIndexConfig(HoodieIndexConfig.newBuilder().fromProperties(props).withIndexType(HoodieIndex.IndexType.BUCKET).withBucketNum("1").build())
         .build();
-
-    Properties props = getPropertiesForKeyGen(true);
     props.putAll(config.getProps());

     metaClient = getHoodieMetaClient(HoodieTableType.MERGE_ON_READ, props);

hudi-common/src/main/java/org/apache/hudi/common/config/ConfigProperty.java

Lines changed: 2 additions & 2 deletions
@@ -27,9 +27,9 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Objects;
 import java.util.Set;
 import java.util.function.Function;
-import java.util.Objects;

 /**
  * ConfigProperty describes a configuration property. It contains the configuration
@@ -76,7 +76,7 @@ public String key() {

   public T defaultValue() {
     if (defaultValue == null) {
-      throw new HoodieException("There's no default value for this config");
+      throw new HoodieException(String.format("There's no default value for this config: %s", key));
     }
     return defaultValue;
   }
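
The sharper message surfaces whenever defaultValue() is called on a config that has none, which is exactly what the record key field becomes below. A small illustrative sketch (the property is declared inline only for demonstration; in Hudi it lives in KeyGeneratorOptions):

    import org.apache.hudi.common.config.ConfigProperty;

    public class NoDefaultValueSketch {
      // Illustrative property with the same shape as RECORDKEY_FIELD_NAME after this commit.
      static final ConfigProperty<String> RECORD_KEY = ConfigProperty
          .key("hoodie.datasource.write.recordkey.field")
          .noDefaultValue()
          .withDocumentation("Record key field.");

      public static void main(String[] args) {
        // Throws HoodieException; the message now names the offending key:
        // "There's no default value for this config: hoodie.datasource.write.recordkey.field"
        RECORD_KEY.defaultValue();
      }
    }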

hudi-common/src/main/java/org/apache/hudi/keygen/constant/KeyGeneratorOptions.java

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ public class KeyGeneratorOptions extends HoodieConfig {

   public static final ConfigProperty<String> RECORDKEY_FIELD_NAME = ConfigProperty
       .key("hoodie.datasource.write.recordkey.field")
-      .defaultValue("uuid")
+      .noDefaultValue()
       .withDocumentation("Record key field. Value to be used as the `recordKey` component of `HoodieKey`.\n"
           + "Actual value will be obtained by invoking .toString() on the field value. Nested fields can be specified using\n"
           + "the dot notation eg: `a.b.c`");

hudi-examples/hudi-examples-spark/src/main/java/org/apache/hudi/examples/quickstart/HoodieSparkQuickstart.java

Lines changed: 1 addition & 2 deletions
@@ -123,7 +123,6 @@ public static void insertOverwriteData(SparkSession spark, JavaSparkContext jsc,
         .save(tablePath);
   }

-
   /**
    * Load the data files into a DataFrame.
    */
@@ -185,7 +184,7 @@ public static void delete(SparkSession spark, String tablePath, String tableName
     df.write().format("org.apache.hudi")
         .options(QuickstartUtils.getQuickstartWriteConfigs())
         .option(HoodieWriteConfig.PRECOMBINE_FIELD_NAME.key(), "ts")
-        .option(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "uuid")
+        .option(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid")
         .option(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "partitionpath")
         .option(TBL_NAME.key(), tableName)
         .option("hoodie.datasource.write.operation", WriteOperationType.DELETE.value())

hudi-examples/hudi-examples-spark/src/test/java/org/apache/hudi/examples/quickstart/TestHoodieSparkQuickstart.java

Lines changed: 1 addition & 10 deletions
@@ -21,9 +21,8 @@
 import org.apache.hudi.client.HoodieReadClient;
 import org.apache.hudi.client.SparkRDDWriteClient;
 import org.apache.hudi.client.common.HoodieSparkEngineContext;
-import org.apache.hudi.common.model.HoodieAvroPayload;
-import org.apache.hudi.examples.common.HoodieExampleDataGenerator;
 import org.apache.hudi.testutils.providers.SparkProvider;
+
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -36,15 +35,7 @@
 import java.io.File;
 import java.nio.file.Paths;

-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.delete;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.deleteByPartition;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.incrementalQuery;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.insertData;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.insertOverwriteData;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.pointInTimeQuery;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.queryData;
 import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.runQuickstart;
-import static org.apache.hudi.examples.quickstart.HoodieSparkQuickstart.updateData;

 public class TestHoodieSparkQuickstart implements SparkProvider {
   protected static HoodieSparkEngineContext context;

hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/DataSourceOptions.scala

Lines changed: 1 addition & 4 deletions
@@ -605,9 +605,6 @@ object DataSourceWriteOptions {
   val RECORDKEY_FIELD_OPT_KEY = KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key()
   /** @deprecated Use {@link RECORDKEY_FIELD} and its methods instead */
   @Deprecated
-  val DEFAULT_RECORDKEY_FIELD_OPT_VAL = RECORDKEY_FIELD.defaultValue()
-  /** @deprecated Use {@link PARTITIONPATH_FIELD} and its methods instead */
-  @Deprecated
   val PARTITIONPATH_FIELD_OPT_KEY = KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key()
   /** @deprecated Use {@link PARTITIONPATH_FIELD} and its methods instead */
   @Deprecated
@@ -789,7 +786,7 @@ object DataSourceOptionsHelper {
     val partitionFields = props.getString(DataSourceWriteOptions.PARTITIONPATH_FIELD.key(), null)
     if (partitionFields != null) {
       val numPartFields = partitionFields.split(",").length
-      val recordsKeyFields = props.getString(DataSourceWriteOptions.RECORDKEY_FIELD.key(), DataSourceWriteOptions.RECORDKEY_FIELD.defaultValue())
+      val recordsKeyFields = props.getString(DataSourceWriteOptions.RECORDKEY_FIELD.key())
       val numRecordKeyFields = recordsKeyFields.split(",").length
       if (numPartFields == 1 && numRecordKeyFields == 1) {
         classOf[SimpleKeyGenerator].getName
