Commit 3bee6ad

sashidharleventov authored and committed
Use map.putIfAbsent() or map.computeIfAbsent() as appropriate instead of containsKey() + put() (#7764)
* #7316 Use Map.putIfAbsent() instead of containsKey() + put()
* fixing indentation
* Using map.computeIfAbsent() instead of map.putIfAbsent() where appropriate
* fixing checkstyle
* Changing the recommendation text
* Reverting auto changes made by IDE
* Implementing recommendation: A ConcurrentHashMap on which computeIfAbsent() is called should be assigned into variables of ConcurrentHashMap type, not ConcurrentMap
* Removing unused import
1 parent 05346a9 commit 3bee6ad
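
For context, here is a minimal standalone sketch contrasting the three idioms this commit is about (illustration only; the class, map, and keys are hypothetical, not code from the commit):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class CheckThenPutDemo
{
  public static void main(String[] args)
  {
    Map<String, Set<String>> buckets = new HashMap<>();

    // Before: two separate lookups, and a non-atomic check-then-act window.
    if (!buckets.containsKey("a")) {
      buckets.put("a", new HashSet<>());
    }

    // putIfAbsent(): a single call, but new HashSet<>() is built eagerly
    // even when the key is already present.
    buckets.putIfAbsent("b", new HashSet<>());

    // computeIfAbsent(): the factory runs only on a miss, and the mapped
    // value is returned, so a follow-up get() is unnecessary.
    buckets.computeIfAbsent("c", k -> new HashSet<>()).add("member");

    System.out.println(buckets);
  }
}

The ConcurrentHashMap point in the commit message likely reflects that only ConcurrentHashMap documents computeIfAbsent() as atomic; the ConcurrentMap interface inherits a default implementation that may retry and is not guaranteed atomic, so keeping the stronger static type keeps the stronger contract.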

File tree: 13 files changed (+365 −384 lines)


.idea/inspectionProfiles/Druid.xml

Lines changed: 6 additions & 1 deletion
Some generated files are not rendered by default.

extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java

Lines changed: 1 addition & 3 deletions
@@ -261,9 +261,7 @@ public PutObjectResult putObject(String bucketName, String key)
       @Override
       public PutObjectResult putObject(String bucketName, String key, File file)
       {
-        if (!storage.containsKey(bucketName)) {
-          storage.put(bucketName, new HashSet<>());
-        }
+        storage.putIfAbsent(bucketName, new HashSet<>());
         storage.get(bucketName).add(key);
         return new PutObjectResult();
       }
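
Since computeIfAbsent() returns the mapped value, the putIfAbsent()-then-get() pair above could be collapsed into a single call. A sketch of that alternative (not what this commit does; the mock class below is a hypothetical stand-in for the test's storage):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class MockS3Storage
{
  private final Map<String, Set<String>> storage = new HashMap<>();

  void putObject(String bucketName, String key)
  {
    // computeIfAbsent() hands back the existing or freshly created set,
    // so no second lookup via get() is needed.
    storage.computeIfAbsent(bucketName, k -> new HashSet<>()).add(key);
  }
}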

indexing-hadoop/src/main/java/org/apache/druid/indexer/DetermineHashedPartitionsJob.java

Lines changed: 1 addition & 3 deletions
@@ -307,9 +307,7 @@ protected void innerMap(
             .getSegmentGranularity()
             .bucket(DateTimes.utc(inputRow.getTimestampFromEpoch()));
 
-        if (!hyperLogLogs.containsKey(interval)) {
-          hyperLogLogs.put(interval, HyperLogLogCollector.makeLatestCollector());
-        }
+        hyperLogLogs.computeIfAbsent(interval, intv -> HyperLogLogCollector.makeLatestCollector());
       } else {
         final Optional<Interval> maybeInterval = config.getGranularitySpec()
             .bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch()));

indexing-service/src/main/java/org/apache/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java

Lines changed: 2 additions & 3 deletions
@@ -50,9 +50,8 @@ public Set<DataSegment> findUsedSegments(Set<SegmentIdWithShardSpec> identifiers
     // Group by dataSource
     final Map<String, Set<SegmentIdWithShardSpec>> identifiersByDataSource = new TreeMap<>();
     for (SegmentIdWithShardSpec identifier : identifiers) {
-      if (!identifiersByDataSource.containsKey(identifier.getDataSource())) {
-        identifiersByDataSource.put(identifier.getDataSource(), new HashSet<>());
-      }
+      identifiersByDataSource.computeIfAbsent(identifier.getDataSource(), k -> new HashSet<>());
+
       identifiersByDataSource.get(identifier.getDataSource()).add(identifier);
     }

indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java

Lines changed: 2 additions & 6 deletions
@@ -768,9 +768,7 @@ private Map<Interval, Optional<HyperLogLogCollector>> collectIntervalsAndShardSp
         }
 
         if (determineNumPartitions) {
-          if (!hllCollectors.containsKey(interval)) {
-            hllCollectors.put(interval, Optional.of(HyperLogLogCollector.makeLatestCollector()));
-          }
+          hllCollectors.computeIfAbsent(interval, intv -> Optional.of(HyperLogLogCollector.makeLatestCollector()));
 
           List<Object> groupKey = Rows.toGroupKey(
               queryGranularity.bucketStart(inputRow.getTimestamp()).getMillis(),
@@ -781,9 +779,7 @@ private Map<Interval, Optional<HyperLogLogCollector>> collectIntervalsAndShardSp
         } else {
           // we don't need to determine partitions but we still need to determine intervals, so add an Optional.absent()
           // for the interval and don't instantiate a HLL collector
-          if (!hllCollectors.containsKey(interval)) {
-            hllCollectors.put(interval, Optional.absent());
-          }
+          hllCollectors.putIfAbsent(interval, Optional.absent());
         }
         determinePartitionsMeters.incrementProcessed();
       }
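
The two hunks above illustrate the selection rule the commit title implies: computeIfAbsent() where the default is costly to build (a fresh HyperLogLogCollector), putIfAbsent() where it is cheap (Optional.absent() is a shared constant, so evaluating the argument eagerly costs nothing). A standalone sketch of the distinction, with hypothetical names:

import java.util.HashMap;
import java.util.Map;

class EagerVsLazyDefaults
{
  // Stand-in for an expensive factory such as HyperLogLogCollector.makeLatestCollector().
  static int[] expensiveCollector()
  {
    return new int[1 << 16];
  }

  public static void main(String[] args)
  {
    Map<String, Object> collectors = new HashMap<>();

    // Costly default: computeIfAbsent() defers construction until a miss.
    collectors.computeIfAbsent("interval-1", k -> expensiveCollector());

    // Cheap constant default: eager evaluation is free, so the simpler
    // putIfAbsent() reads better.
    collectors.putIfAbsent("interval-2", java.util.Optional.empty());

    System.out.println(collectors.keySet());
  }
}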

indexing-service/src/main/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactory.java

Lines changed: 75 additions & 74 deletions
@@ -204,87 +204,87 @@ public Firehose connect(InputRowParser inputRowParser, File temporaryDirectory)
         segmentIds
     );
 
-    try {
-      final List<TimelineObjectHolder<String, DataSegment>> timeLineSegments = getTimeline();
-
-      // Download all segments locally.
-      // Note: this requires enough local storage space to fit all of the segments, even though
-      // IngestSegmentFirehose iterates over the segments in series. We may want to change this
-      // to download files lazily, perhaps sharing code with PrefetchableTextFilesFirehoseFactory.
-      final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory);
-      Map<DataSegment, File> segmentFileMap = Maps.newLinkedHashMap();
-      for (TimelineObjectHolder<String, DataSegment> holder : timeLineSegments) {
-        for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
-          final DataSegment segment = chunk.getObject();
-          if (!segmentFileMap.containsKey(segment)) {
-            segmentFileMap.put(segment, segmentLoader.getSegmentFiles(segment));
+    final List<TimelineObjectHolder<String, DataSegment>> timeLineSegments = getTimeline();
+
+    // Download all segments locally.
+    // Note: this requires enough local storage space to fit all of the segments, even though
+    // IngestSegmentFirehose iterates over the segments in series. We may want to change this
+    // to download files lazily, perhaps sharing code with PrefetchableTextFilesFirehoseFactory.
+    final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory);
+    Map<DataSegment, File> segmentFileMap = Maps.newLinkedHashMap();
+    for (TimelineObjectHolder<String, DataSegment> holder : timeLineSegments) {
+      for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
+        final DataSegment segment = chunk.getObject();
+
+        segmentFileMap.computeIfAbsent(segment, k -> {
+          try {
+            return segmentLoader.getSegmentFiles(segment);
           }
-        }
-      }
+          catch (SegmentLoadingException e) {
+            throw new RuntimeException(e);
+          }
+        });
 
-      final List<String> dims;
-      if (dimensions != null) {
-        dims = dimensions;
-      } else if (inputRowParser.getParseSpec().getDimensionsSpec().hasCustomDimensions()) {
-        dims = inputRowParser.getParseSpec().getDimensionsSpec().getDimensionNames();
-      } else {
-        dims = getUniqueDimensions(
-            timeLineSegments,
-            inputRowParser.getParseSpec().getDimensionsSpec().getDimensionExclusions()
-        );
       }
+    }
 
-      final List<String> metricsList = metrics == null ? getUniqueMetrics(timeLineSegments) : metrics;
+    final List<String> dims;
+    if (dimensions != null) {
+      dims = dimensions;
+    } else if (inputRowParser.getParseSpec().getDimensionsSpec().hasCustomDimensions()) {
+      dims = inputRowParser.getParseSpec().getDimensionsSpec().getDimensionNames();
+    } else {
+      dims = getUniqueDimensions(
+          timeLineSegments,
+          inputRowParser.getParseSpec().getDimensionsSpec().getDimensionExclusions()
+      );
+    }
 
-      final List<WindowedStorageAdapter> adapters = Lists.newArrayList(
-          Iterables.concat(
-              Iterables.transform(
-                  timeLineSegments,
-                  new Function<TimelineObjectHolder<String, DataSegment>, Iterable<WindowedStorageAdapter>>()
-                  {
+    final List<String> metricsList = metrics == null ? getUniqueMetrics(timeLineSegments) : metrics;
+
+    final List<WindowedStorageAdapter> adapters = Lists.newArrayList(
+        Iterables.concat(
+            Iterables.transform(
+                timeLineSegments,
+                new Function<TimelineObjectHolder<String, DataSegment>, Iterable<WindowedStorageAdapter>>() {
+                  @Override
+                  public Iterable<WindowedStorageAdapter> apply(final TimelineObjectHolder<String, DataSegment> holder)
+                  {
+                    return
+                        Iterables.transform(
+                            holder.getObject(),
+                            new Function<PartitionChunk<DataSegment>, WindowedStorageAdapter>() {
                     @Override
-                    public Iterable<WindowedStorageAdapter> apply(final TimelineObjectHolder<String, DataSegment> holder)
+                    public WindowedStorageAdapter apply(final PartitionChunk<DataSegment> input)
                     {
-                      return
-                          Iterables.transform(
-                              holder.getObject(),
-                              new Function<PartitionChunk<DataSegment>, WindowedStorageAdapter>()
-                              {
-                                @Override
-                                public WindowedStorageAdapter apply(final PartitionChunk<DataSegment> input)
-                                {
-                                  final DataSegment segment = input.getObject();
-                                  try {
-                                    return new WindowedStorageAdapter(
-                                        new QueryableIndexStorageAdapter(
-                                            indexIO.loadIndex(
-                                                Preconditions.checkNotNull(
-                                                    segmentFileMap.get(segment),
-                                                    "File for segment %s", segment.getId()
-                                                )
-                                            )
-                                        ),
-                                        holder.getInterval()
-                                    );
-                                  }
-                                  catch (IOException e) {
-                                    throw new RuntimeException(e);
-                                  }
-                                }
-                              }
-                          );
+                      final DataSegment segment = input.getObject();
+                      try {
+                        return new WindowedStorageAdapter(
+                            new QueryableIndexStorageAdapter(
+                                indexIO.loadIndex(
+                                    Preconditions.checkNotNull(
+                                        segmentFileMap.get(segment),
+                                        "File for segment %s", segment.getId()
+                                    )
+                                )
+                            ),
+                            holder.getInterval()
+                        );
+                      }
+                      catch (IOException e) {
+                        throw new RuntimeException(e);
+                      }
                     }
                   }
-              )
-          )
-      );
+                        );
+                  }
+                }
+            )
+        )
+    );
 
-      final TransformSpec transformSpec = TransformSpec.fromInputRowParser(inputRowParser);
-      return new IngestSegmentFirehose(adapters, transformSpec, dims, metricsList, dimFilter);
-    }
-    catch (SegmentLoadingException e) {
-      throw new RuntimeException(e);
-    }
+    final TransformSpec transformSpec = TransformSpec.fromInputRowParser(inputRowParser);
+    return new IngestSegmentFirehose(adapters, transformSpec, dims, metricsList, dimFilter);
   }
 
   private long jitter(long input)
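
One consequence visible in the hunk above: the lambda handed to computeIfAbsent() is a java.util.function.Function, which declares no checked exceptions, so the checked SegmentLoadingException that the old code let propagate to the outer try must now be caught inside the lambda and rethrown unchecked. A minimal standalone sketch of the pattern (the cache and loader below are hypothetical, not Druid code):

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

class LazyFileCache
{
  private final Map<String, File> files = new HashMap<>();

  File fetch(String id)
  {
    // Function.apply() cannot throw checked exceptions, so the IOException
    // is caught inside the lambda and wrapped in a RuntimeException.
    return files.computeIfAbsent(id, k -> {
      try {
        return download(k);
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
    });
  }

  private File download(String id) throws IOException
  {
    return File.createTempFile("segment-" + id, null);
  }
}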
@@ -508,13 +508,14 @@ static List<String> getUniqueMetrics(List<TimelineObjectHolder<String, DataSegme
     // segments to olders.
 
     // timelineSegments are sorted in order of interval
-    int index = 0;
+    int[] index = {0};
     for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) {
       for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) {
         for (String metric : chunk.getObject().getMetrics()) {
-          if (!uniqueMetrics.containsKey(metric)) {
-            uniqueMetrics.put(metric, index++);
+          uniqueMetrics.computeIfAbsent(metric, k -> {
+            return index[0]++;
           }
+          );
         }
       }
     }
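
The int[] index = {0} change above is the usual workaround for lambda capture rules: a lambda can only read effectively final locals, so a plain int counter cannot be incremented inside it, while the contents of a captured one-element array can be mutated freely. A standalone sketch (hypothetical data):

import java.util.LinkedHashMap;
import java.util.Map;

class MetricOrdinals
{
  public static void main(String[] args)
  {
    Map<String, Integer> uniqueMetrics = new LinkedHashMap<>();

    // "int index = 0; ... index++" would not compile inside the lambda;
    // the array reference stays effectively final while its slot mutates.
    int[] index = {0};
    for (String metric : new String[]{"count", "sum", "count", "max"}) {
      uniqueMetrics.computeIfAbsent(metric, k -> index[0]++);
    }

    // Prints {count=0, sum=1, max=2}: each metric gets the next ordinal once.
    System.out.println(uniqueMetrics);
  }
}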
