Skip to content

Commit 9f284a0

Browse files
nsivabalan authored and codope committed
[HUDI-4501] Throwing exception when restore is attempted with hoodie.archive.beyond.savepoint is enabled (#6239)
1 parent 46200a1 commit 9f284a0

File tree

2 files changed

+60
-0
lines changed

2 files changed

+60
-0
lines changed

hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@
6161
import org.apache.hudi.common.util.Option;
6262
import org.apache.hudi.common.util.ValidationUtils;
6363
import org.apache.hudi.common.util.collection.Pair;
64+
import org.apache.hudi.config.HoodieArchivalConfig;
6465
import org.apache.hudi.config.HoodieClusteringConfig;
6566
import org.apache.hudi.config.HoodieCompactionConfig;
6667
import org.apache.hudi.config.HoodieWriteConfig;
@@ -721,6 +722,8 @@ public void restoreToSavepoint(String savepointTime) {
721722

722723
HoodieTable<T, I, K, O> table = initTable(WriteOperationType.UNKNOWN, Option.empty(), initialMetadataTableIfNecessary);
723724
SavepointHelpers.validateSavepointPresence(table, savepointTime);
725+
ValidationUtils.checkArgument(!config.shouldArchiveBeyondSavepoint(), "Restore is not supported when " + HoodieArchivalConfig.ARCHIVE_BEYOND_SAVEPOINT.key()
726+
+ " is enabled");
724727
restoreToInstant(savepointTime, initialMetadataTableIfNecessary);
725728
SavepointHelpers.validateSavepointRestore(table, savepointTime);
726729
}

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@
7171
import org.apache.hudi.common.util.Option;
7272
import org.apache.hudi.common.util.StringUtils;
7373
import org.apache.hudi.common.util.collection.Pair;
74+
import org.apache.hudi.config.HoodieArchivalConfig;
7475
import org.apache.hudi.config.HoodieCompactionConfig;
7576
import org.apache.hudi.config.HoodieIndexConfig;
7677
import org.apache.hudi.config.HoodieClusteringConfig;
@@ -675,6 +676,62 @@ private void testUpsertsInternal(HoodieWriteConfig config,
675676
}).collect();
676677
}
677678

679+
@Test
680+
public void testRestoreWithSavepointBeyondArchival() throws Exception {
681+
HoodieWriteConfig config = getConfigBuilder().withRollbackUsingMarkers(true).build();
682+
HoodieWriteConfig hoodieWriteConfig = getConfigBuilder(EAGER)
683+
.withRollbackUsingMarkers(true)
684+
.withArchivalConfig(HoodieArchivalConfig.newBuilder().withArchiveBeyondSavepoint(true).build())
685+
.withProps(config.getProps()).withTimelineLayoutVersion(
686+
VERSION_0).build();
687+
688+
HoodieTableMetaClient.withPropertyBuilder()
689+
.fromMetaClient(metaClient)
690+
.setTimelineLayoutVersion(VERSION_0)
691+
.setPopulateMetaFields(config.populateMetaFields())
692+
.initTable(metaClient.getHadoopConf(), metaClient.getBasePath());
693+
694+
SparkRDDWriteClient client = getHoodieWriteClient(hoodieWriteConfig);
695+
696+
// Write 1 (only inserts)
697+
String newCommitTime = "001";
698+
String initCommitTime = "000";
699+
int numRecords = 200;
700+
insertFirstBatch(hoodieWriteConfig, client, newCommitTime, initCommitTime, numRecords, SparkRDDWriteClient::insert,
701+
false, true, numRecords, config.populateMetaFields());
702+
703+
// Write 2 (updates)
704+
String prevCommitTime = newCommitTime;
705+
newCommitTime = "004";
706+
numRecords = 100;
707+
String commitTimeBetweenPrevAndNew = "002";
708+
updateBatch(hoodieWriteConfig, client, newCommitTime, prevCommitTime,
709+
Option.of(Arrays.asList(commitTimeBetweenPrevAndNew)), initCommitTime, numRecords, SparkRDDWriteClient::upsert, false, true,
710+
numRecords, 200, 2, config.populateMetaFields());
711+
712+
// Delete 1
713+
prevCommitTime = newCommitTime;
714+
newCommitTime = "005";
715+
numRecords = 50;
716+
717+
deleteBatch(hoodieWriteConfig, client, newCommitTime, prevCommitTime,
718+
initCommitTime, numRecords, SparkRDDWriteClient::delete, false, true,
719+
0, 150, config.populateMetaFields());
720+
721+
HoodieWriteConfig newConfig = getConfigBuilder().withProps(config.getProps()).withTimelineLayoutVersion(
722+
TimelineLayoutVersion.CURR_VERSION)
723+
.withArchivalConfig(HoodieArchivalConfig.newBuilder().withArchiveBeyondSavepoint(true).build()).build();
724+
client = getHoodieWriteClient(newConfig);
725+
726+
client.savepoint("004", "user1", "comment1");
727+
728+
// verify that restore fails when "hoodie.archive.beyond.savepoint" is enabled.
729+
SparkRDDWriteClient finalClient = client;
730+
assertThrows(IllegalArgumentException.class, () -> {
731+
finalClient.restoreToSavepoint("004");
732+
}, "Restore should not be supported when " + HoodieArchivalConfig.ARCHIVE_BEYOND_SAVEPOINT.key() + " is enabled");
733+
}
734+
678735
/**
679736
* Test Insert API for HoodieConcatHandle.
680737
*/

0 commit comments

Comments
 (0)