diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/savepoint/SavepointHelpers.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/savepoint/SavepointHelpers.java
index c12a4f169b9d0..f00cd87797f6b 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/savepoint/SavepointHelpers.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/table/action/savepoint/SavepointHelpers.java
@@ -18,7 +18,6 @@
 
 package org.apache.hudi.table.action.savepoint;
 
-import org.apache.hudi.common.model.HoodieTableType;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieTimeline;
 import org.apache.hudi.common.util.Option;
@@ -33,9 +32,6 @@ public class SavepointHelpers {
   private static final Logger LOG = LogManager.getLogger(SavepointHelpers.class);
 
   public static void deleteSavepoint(HoodieTable table, String savepointTime) {
-    if (table.getMetaClient().getTableType() == HoodieTableType.MERGE_ON_READ) {
-      throw new UnsupportedOperationException("Savepointing is not supported or MergeOnRead table types");
-    }
     HoodieInstant savePoint = new HoodieInstant(false, HoodieTimeline.SAVEPOINT_ACTION, savepointTime);
     boolean isSavepointPresent = table.getCompletedSavepointTimeline().containsInstant(savePoint);
     if (!isSavepointPresent) {
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
index de89affbfc693..66e21bf4c45c9 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
@@ -591,6 +591,9 @@ private void testUpsertsInternal(HoodieWriteConfig config,
 
     assertFalse(metaClient.reloadActiveTimeline().getRollbackTimeline().lastInstant().isPresent());
 
+    client.deleteSavepoint("004");
+    assertFalse(metaClient.reloadActiveTimeline().getSavePointTimeline().containsInstant("004"));
+
     // Check the entire dataset has all records still
     String[] fullPartitionPaths = new String[dataGen.getPartitionPaths().length];
     for (int i = 0; i < fullPartitionPaths.length; i++) {
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableRollback.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableRollback.java
index 0a11425ec5b89..35d7b6329e262 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableRollback.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableRollback.java
@@ -579,6 +579,9 @@ void testRestoreWithCleanedUpCommits() throws Exception {
       HoodieTableFileSystemView tableView = getHoodieTableFileSystemView(metaClient, metaClient.getCommitTimeline().filterCompletedInstants(), allFiles);
       Stream<HoodieBaseFile> dataFilesToRead = tableView.getLatestBaseFiles();
       assertFalse(dataFilesToRead.anyMatch(file -> HoodieTimeline.compareTimestamps("002", HoodieTimeline.GREATER_THAN, file.getCommitTime())));
+
+      client.deleteSavepoint("002");
+      assertFalse(metaClient.reloadActiveTimeline().getSavePointTimeline().containsInstant("002"));
     }
   }
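
For reference, a minimal usage sketch of what this patch enables, mirroring the assertions added in the two tests. The class and method names below are hypothetical and the snippet is not part of the patch; it assumes an already-initialized SparkRDDWriteClient and HoodieTableMetaClient pointing at the same table, with a savepoint previously created at savepointTime.

import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.common.table.HoodieTableMetaClient;

public class SavepointDeletionSketch {

  // Hypothetical helper: delete a savepoint and confirm the instant is gone
  // from the savepoint timeline, as the new test assertions do.
  public static boolean deleteAndVerify(SparkRDDWriteClient<?> client,
                                        HoodieTableMetaClient metaClient,
                                        String savepointTime) {
    // With the MERGE_ON_READ guard removed from SavepointHelpers.deleteSavepoint,
    // this call works for both COPY_ON_WRITE and MERGE_ON_READ tables instead of
    // throwing UnsupportedOperationException for the latter.
    client.deleteSavepoint(savepointTime);

    // Reload the active timeline and check that the savepoint instant was removed.
    return !metaClient.reloadActiveTimeline()
        .getSavePointTimeline()
        .containsInstant(savepointTime);
  }
}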