@@ -18,8 +18,6 @@

package org.apache.hudi.cli.commands;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.TableHeader;
@@ -35,6 +33,9 @@
import org.apache.hudi.metadata.HoodieBackedTableMetadata;
import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.spark.api.java.JavaSparkContext;
@@ -122,7 +123,7 @@ public String create(
HoodieCLI.fs.mkdirs(metadataPath);
}

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
HoodieWriteConfig writeConfig = getWriteConfig();
initJavaSparkContext(Option.of(master));
SparkHoodieBackedTableMetadataWriter.create(HoodieCLI.conf, writeConfig, new HoodieSparkEngineContext(jsc));
@@ -158,7 +159,7 @@ public String init(@ShellOption(value = "--sparkMaster", defaultValue = SparkUti
throw new RuntimeException("Metadata directory (" + metadataPath.toString() + ") does not exist.");
}

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
if (!readOnly) {
HoodieWriteConfig writeConfig = getWriteConfig();
initJavaSparkContext(Option.of(master));
@@ -206,7 +207,7 @@ public String listPartitions(
return "[ERROR] Metadata Table not enabled/initialized\n\n";
}

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
List<String> partitions = metadata.getAllPartitionPaths();
LOG.debug("Took " + timer.endTimer() + " ms");

@@ -239,7 +240,7 @@ public String listFiles(
partitionPath = new Path(HoodieCLI.basePath, partition);
}

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
FileStatus[] statuses = metaReader.getAllFilesInPartition(partitionPath);
LOG.debug("Took " + timer.endTimer() + " ms");

@@ -271,7 +272,7 @@ public String validateFiles(
HoodieBackedTableMetadata fsMetaReader = new HoodieBackedTableMetadata(
new HoodieLocalEngineContext(HoodieCLI.conf), fsConfig, HoodieCLI.basePath, "/tmp");

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
List<String> metadataPartitions = metadataReader.getAllPartitionPaths();
LOG.debug("Listing partitions Took " + timer.endTimer() + " ms");
List<String> fsPartitions = fsMetaReader.getAllPartitionPaths();
@@ -18,8 +18,6 @@

package org.apache.hudi.index;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.engine.HoodieEngineContext;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.FileSlice;
@@ -35,6 +33,9 @@
import org.apache.hudi.io.storage.HoodieFileReader;
import org.apache.hudi.io.storage.HoodieFileReaderFactory;
import org.apache.hudi.table.HoodieTable;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

@@ -152,7 +153,7 @@ public static List<String> filterKeysFromFile(Path filePath, List<String> candid
try {
// Load all rowKeys from the file, to double-confirm
if (!candidateRecordKeys.isEmpty()) {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
HoodieFileReader fileReader = HoodieFileReaderFactory.getFileReader(configuration, filePath);
Set<String> fileRowKeys = fileReader.filterRowKeys(new TreeSet<>(candidateRecordKeys));
foundRecordKeys.addAll(fileRowKeys);
@@ -60,7 +60,7 @@ public HoodieKeyLookupHandle(HoodieWriteConfig config, HoodieTable<T, I, K, O> h

private BloomFilter getBloomFilter() {
BloomFilter bloomFilter = null;
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
try {
if (config.getBloomIndexUseMetadata()
&& hoodieTable.getMetaClient().getTableConfig().getMetadataPartitions()
@@ -49,8 +49,8 @@

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.HashMap;
import java.util.List;

import static org.apache.hudi.common.util.StringUtils.isNullOrEmpty;

@@ -124,7 +124,7 @@ protected HoodieWriteHandle(HoodieWriteConfig config, String instantTime, String
this.tableSchemaWithMetaFields = HoodieAvroUtils.addMetadataFields(tableSchema, config.allowOperationMetadataField());
this.writeSchema = overriddenSchema.orElseGet(() -> getWriteSchema(config));
this.writeSchemaWithMetaFields = HoodieAvroUtils.addMetadataFields(writeSchema, config.allowOperationMetadataField());
this.timer = new HoodieTimer().startTimer();
this.timer = HoodieTimer.start();
this.writeStatus = (WriteStatus) ReflectionUtils.loadClass(config.getWriteStatusClassName(),
!hoodieTable.getIndex().isImplicitWithStorage(), config.getWriteStatusFailureFraction());
this.taskContextSupplier = taskContextSupplier;
@@ -382,7 +382,7 @@ public void initTableMetadata() {
protected <T extends SpecificRecordBase> void initializeIfNeeded(HoodieTableMetaClient dataMetaClient,
Option<T> actionMetadata,
Option<String> inflightInstantTimestamp) throws IOException {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();

boolean exists = metadataTableExists(dataMetaClient, actionMetadata);

@@ -18,9 +18,6 @@

package org.apache.hudi.table.action.clean;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.apache.hudi.avro.model.HoodieActionInstant;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
@@ -43,6 +40,8 @@
import org.apache.hudi.table.HoodieTable;
import org.apache.hudi.table.action.BaseActionExecutor;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

@@ -198,8 +197,7 @@ private HoodieCleanMetadata runClean(HoodieTable<T, I, K, O> table, HoodieInstan

HoodieInstant inflightInstant = null;
try {
final HoodieTimer timer = new HoodieTimer();
timer.startTimer();
final HoodieTimer timer = HoodieTimer.start();
if (cleanInstant.isRequested()) {
inflightInstant = table.getActiveTimeline().transitionCleanRequestedToInflight(cleanInstant,
TimelineMetadataUtils.serializeCleanerPlan(cleanerPlan));
@@ -104,8 +104,7 @@ public RunIndexActionExecutor(HoodieEngineContext context, HoodieWriteConfig con

@Override
public Option<HoodieIndexCommitMetadata> execute() {
HoodieTimer indexTimer = new HoodieTimer();
indexTimer.startTimer();
HoodieTimer indexTimer = HoodieTimer.start();

HoodieInstant indexInstant = validateAndGetIndexInstant();
// read HoodieIndexPlan
@@ -68,8 +68,7 @@ public BaseRestoreActionExecutor(HoodieEngineContext context,

@Override
public HoodieRestoreMetadata execute() {
HoodieTimer restoreTimer = new HoodieTimer();
restoreTimer.startTimer();
HoodieTimer restoreTimer = HoodieTimer.start();

Option<HoodieInstant> restoreInstant = table.getRestoreTimeline()
.filterInflightsAndRequested()
@@ -104,7 +104,7 @@ private HoodieRollbackMetadata runRollback(HoodieTable<T, I, K, O> table, Hoodie
? table.getActiveTimeline().transitionRollbackRequestedToInflight(rollbackInstant)
: rollbackInstant;

HoodieTimer rollbackTimer = new HoodieTimer().startTimer();
HoodieTimer rollbackTimer = HoodieTimer.start();
List<HoodieRollbackStat> stats = doRollbackAndGetStats(rollbackPlan);
HoodieRollbackMetadata rollbackMetadata = TimelineMetadataUtils.convertRollbackMetadata(
instantTime,
@@ -61,8 +61,7 @@ public CopyOnWriteRollbackActionExecutor(HoodieEngineContext context,

@Override
protected List<HoodieRollbackStat> executeRollback(HoodieRollbackPlan hoodieRollbackPlan) {
HoodieTimer rollbackTimer = new HoodieTimer();
rollbackTimer.startTimer();
HoodieTimer rollbackTimer = HoodieTimer.start();

List<HoodieRollbackStat> stats = new ArrayList<>();
HoodieActiveTimeline activeTimeline = table.getActiveTimeline();
@@ -61,8 +61,7 @@ public MergeOnReadRollbackActionExecutor(HoodieEngineContext context,

@Override
protected List<HoodieRollbackStat> executeRollback(HoodieRollbackPlan hoodieRollbackPlan) {
HoodieTimer rollbackTimer = new HoodieTimer();
rollbackTimer.startTimer();
HoodieTimer rollbackTimer = HoodieTimer.start();

LOG.info("Rolling back instant " + instantToRollback);

@@ -156,7 +156,7 @@ protected Option<Path> create(String partitionPath, String dataFileName, IOType
}

private Option<Path> create(Path markerPath, boolean checkIfExists) {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
Path dirPath = markerPath.getParent();
try {
if (!fs.exists(dirPath)) {
@@ -129,7 +129,7 @@ public Set<String> allMarkerFilePaths() {

@Override
protected Option<Path> create(String partitionPath, String dataFileName, IOType type, boolean checkIfExists) {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
String markerFileName = getMarkerFileName(dataFileName, type);

Map<String, String> paramsMap = new HashMap<>();
@@ -82,8 +82,7 @@ public HoodieRowDataCreateHandle(HoodieTable table, HoodieWriteConfig writeConfi
this.taskEpochId = taskEpochId;
this.fileId = fileId;
this.preserveHoodieMetadata = preserveHoodieMetadata;
this.currTimer = new HoodieTimer();
this.currTimer.startTimer();
this.currTimer = HoodieTimer.start();
this.fs = table.getMetaClient().getFs();
this.path = makeNewPath(partitionPath);
this.writeStatus = new HoodieInternalWriteStatus(!table.getIndex().isImplicitWithStorage(),
@@ -70,7 +70,7 @@ protected Set<String> getPartitionsModified(HoodieWriteMetadata<O> writeResult)
* Throw HoodieValidationException if any unexpected data is written (Example: data files are not readable for some reason).
*/
public void validate(String instantTime, HoodieWriteMetadata<O> writeResult, Dataset<Row> before, Dataset<Row> after) throws HoodieValidationException {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
try {
validateRecordsBeforeAndAfter(before, after, getPartitionsModified(writeResult));
} finally {
@@ -18,12 +18,6 @@

package org.apache.hudi.table.action.commit;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.avro.model.HoodieRequestedReplaceMetadata;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.data.HoodieData;
@@ -42,6 +36,14 @@
import org.apache.hudi.table.WorkloadStat;
import org.apache.hudi.table.action.HoodieWriteMetadata;

import org.apache.hadoop.fs.Path;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.apache.hudi.common.table.timeline.HoodieInstant.State.REQUESTED;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.REPLACE_COMMIT_ACTION;

@@ -59,7 +61,7 @@ public SparkDeletePartitionCommitActionExecutor(HoodieEngineContext context,
@Override
public HoodieWriteMetadata<HoodieData<WriteStatus>> execute() {
try {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
context.setJobStatus(this.getClass().getSimpleName(), "Gather all file ids from all deleting partitions.");
Map<String, List<String>> partitionToReplaceFileIds =
HoodieJavaPairRDD.getJavaPairRDD(context.parallelize(partitions).distinct()
@@ -2400,7 +2400,7 @@ private void validateMetadata(SparkRDDWriteClient testClient) throws IOException
return;
}

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
HoodieSparkEngineContext engineContext = new HoodieSparkEngineContext(jsc);

// Partitions should match
@@ -578,7 +578,7 @@ public void validateMetadata(HoodieTestTable testTable, List<String> inflightCom
}
assertEquals(inflightCommits, testTable.inflightCommits());

HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
HoodieSparkEngineContext engineContext = new HoodieSparkEngineContext(jsc);

// Partitions should match
@@ -24,6 +24,7 @@
import org.apache.hudi.common.util.Option;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.hadoop.CachingPath;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
@@ -49,7 +50,6 @@
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
import org.apache.hudi.hadoop.CachingPath;

import java.io.IOException;
import java.net.URI;
@@ -102,7 +102,7 @@ private static Registry getMetricRegistryForPath(Path p) {
}

protected static <R> R executeFuncWithTimeMetrics(String metricName, Path p, CheckedFunction<R> func) throws IOException {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
R res = func.get();

Registry registry = getMetricRegistryForPath(p);
@@ -132,7 +132,7 @@ protected void refreshTimeline(HoodieTimeline visibleActiveTimeline) {
* Adds the provided statuses into the file system view, and also caches it inside this object.
*/
public List<HoodieFileGroup> addFilesToView(FileStatus[] statuses) {
HoodieTimer timer = new HoodieTimer().startTimer();
HoodieTimer timer = HoodieTimer.start();
List<HoodieFileGroup> fileGroups = buildFileGroups(statuses, visibleCommitsAndCompactionTimeline, true);
long fgBuildTimeTakenMs = timer.endTimer();
timer.startTimer();
@@ -216,8 +216,7 @@ protected List<HoodieFileGroup> buildFileGroups(Stream<HoodieBaseFile> baseFileS
* Get replaced instant for each file group by looking at all commit instants.
*/
private void resetFileGroupsReplaced(HoodieTimeline timeline) {
HoodieTimer hoodieTimer = new HoodieTimer();
hoodieTimer.startTimer();
HoodieTimer hoodieTimer = HoodieTimer.start();
// for each REPLACE instant, get map of (partitionPath -> deleteFileGroup)
HoodieTimeline replacedTimeline = timeline.getCompletedReplaceTimeline();
Stream<Map.Entry<HoodieFileGroupId, HoodieInstant>> resultStream = replacedTimeline.getInstants().flatMap(instant -> {
@@ -334,8 +334,7 @@ public <K extends Serializable, T extends Serializable> T get(String columnFamil
*/
public <T extends Serializable> Stream<Pair<String, T>> prefixSearch(String columnFamilyName, String prefix) {
ValidationUtils.checkArgument(!closed);
final HoodieTimer timer = new HoodieTimer();
timer.startTimer();
final HoodieTimer timer = HoodieTimer.start();
long timeTakenMicro = 0;
List<Pair<String, T>> results = new LinkedList<>();
try (final RocksIterator it = getRocksDB().newIterator(managedHandlesMap.get(columnFamilyName))) {
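Every functional hunk in this diff makes the same substitution: the two-step new HoodieTimer() followed by startTimer() becomes the single static factory call HoodieTimer.start(), which returns an already-running timer. The remaining hunks only regroup imports (org.apache.hudi first, then org.apache.hadoop and logging, then java.*) and do not change behavior. A minimal sketch of the new idiom follows; it assumes HoodieTimer lives in org.apache.hudi.common.util (the import is not visible in these hunks) and that endTimer() returns elapsed milliseconds, as the surrounding LOG.debug("Took ... ms") calls imply.

import org.apache.hudi.common.util.HoodieTimer; // assumed package; not shown in the hunks above

public class HoodieTimerUsageSketch {
  public static void main(String[] args) throws InterruptedException {
    // Old idiom removed by this PR:
    //   HoodieTimer timer = new HoodieTimer();
    //   timer.startTimer();

    // New idiom: construction and start collapse into one call.
    HoodieTimer timer = HoodieTimer.start();
    Thread.sleep(50); // stand-in for the work being timed
    System.out.println("Took " + timer.endTimer() + " ms");

    // The same instance can be restarted, as addFilesToView() above does
    // by calling timer.startTimer() again after the first endTimer().
    timer.startTimer();
    System.out.println("Second measurement: " + timer.endTimer() + " ms");
  }
}

Collapsing construction and start into one call removes the window in which a constructed-but-never-started timer could be read, and trims a line from each call site.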