Remove Redundant ConcurrentHashMapLong (#76172) (#76524)
This class is nothing but a no-op wrapper around a normal CHM. It adds
indirection, hides the fact that it's just a standard CHM, and incurs all
the same boxing/unboxing under the hood.
original-brownbear committed Aug 14, 2021
1 parent b658b24 commit 686557e
Showing 7 changed files with 7 additions and 157 deletions.
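
To make the boxing point from the commit message concrete: a plain ConcurrentHashMap keyed by Long already auto-boxes every primitive long key on put and get, so a Long-specialized wrapper around it saves nothing. The sketch below is a minimal standalone illustration; the class name BoxingDemo and the sample values are invented and are not code from this commit.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class BoxingDemo {
    public static void main(String[] args) {
        // A plain ConcurrentHashMap keyed by Long: every primitive long passed in
        // is auto-boxed to java.lang.Long, with or without a wrapper around the map.
        Map<Long, String> byId = new ConcurrentHashMap<>();

        long id = 42L;                    // primitive key
        byId.put(id, "reader-context");   // boxed to Long here
        System.out.println(byId.get(id)); // boxed again here; prints "reader-context"
    }
}
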
@@ -46,17 +46,6 @@ public static <K, V> ConcurrentMap<K, V> newConcurrentMap() {
return new ConcurrentHashMap<>();
}

-    /**
-     * Creates a new CHM with an aggressive concurrency level, aimed at highly updateable long living maps.
-     */
-    public static <V> ConcurrentMapLong<V> newConcurrentMapLongWithAggressiveConcurrency() {
-        return new ConcurrentHashMapLong<>(ConcurrentCollections.<Long, V>newConcurrentMapWithAggressiveConcurrency());
-    }
-
-    public static <V> ConcurrentMapLong<V> newConcurrentMapLong() {
-        return new ConcurrentHashMapLong<>(ConcurrentCollections.<Long, V>newConcurrentMap());
-    }
-
public static <V> Set<V> newConcurrentSet() {
return Collections.newSetFromMap(ConcurrentCollections.<V, Boolean>newConcurrentMap());
}

This file was deleted.
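
The call-site changes below all follow the same pattern: the removed ConcurrentMapLong factory methods are replaced by the existing ConcurrentCollections factories, and the fields become plain Map<Long, V>. Here is a condensed sketch of that pattern, assuming the Elasticsearch classes available at this commit; the TaskRegistry class and its register method are invented for illustration and mirror the TaskManager change further down.

import java.util.Map;

import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.tasks.Task;

class TaskRegistry {
    // Before this commit (removed):
    //   private final ConcurrentMapLong<Task> tasks =
    //       ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
    //
    // After: the existing factory already returns a concurrent Map<Long, V>,
    // so callers declare a plain Map and drop the extra interface.
    private final Map<Long, Task> tasks = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();

    Task register(long taskId, Task task) {
        return tasks.put(taskId, task); // taskId is boxed either way
    }
}

Nothing changes at runtime: the factory was already handing back a boxed-key ConcurrentHashMap, so dropping the wrapper only removes indirection.
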

@@ -43,7 +43,6 @@
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
@@ -221,7 +220,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv

private final AtomicLong idGenerator = new AtomicLong();

-    private final ConcurrentMapLong<ReaderContext> activeReaders = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
+    private final Map<Long, ReaderContext> activeReaders = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();

private final MultiBucketConsumerService multiBucketConsumerService;

@@ -32,7 +32,6 @@
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TaskTransportChannel;
@@ -77,7 +76,7 @@ public class TaskManager implements ClusterStateApplier {
private final List<String> taskHeaders;
private final ThreadPool threadPool;

-    private final ConcurrentMapLong<Task> tasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
+    private final Map<Long, Task> tasks = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();

private final CancellableTasksTracker<CancellableTaskHolder> cancellableTasks
= new CancellableTasksTracker<>(new CancellableTaskHolder[0]);
@@ -17,7 +17,6 @@
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;

import java.io.Closeable;
import java.io.IOException;
@@ -165,8 +164,8 @@ public String action() {
* This class is a registry that allows
*/
final class ResponseHandlers {
-        private final ConcurrentMapLong<ResponseContext<? extends TransportResponse>> handlers = ConcurrentCollections
-            .newConcurrentMapLongWithAggressiveConcurrency();
+        private final Map<Long, ResponseContext<? extends TransportResponse>> handlers = ConcurrentCollections
+            .newConcurrentMapWithAggressiveConcurrency();
private final AtomicLong requestIdGenerator = new AtomicLong();

/**
@@ -19,7 +19,6 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -615,7 +614,7 @@ public void testRealtime_multipleStopCalls() throws Exception {
final String datafeedId = jobId + "-datafeed";
startRealtime(jobId);

-        ConcurrentMapLong<AssertionError> exceptions = ConcurrentCollections.newConcurrentMapLong();
+        Map<Long, AssertionError> exceptions = ConcurrentCollections.newConcurrentMap();

// It's practically impossible to assert that a stop request has waited
// for a concurrently executing request to finish before returning.
@@ -725,8 +725,8 @@ public void testDelete_multipleRequest() throws Exception {
String jobId = "delete-job-multiple-times";
createFarequoteJob(jobId);

-        ConcurrentMapLong<Response> responses = ConcurrentCollections.newConcurrentMapLong();
-        ConcurrentMapLong<ResponseException> responseExceptions = ConcurrentCollections.newConcurrentMapLong();
+        Map<Long, Response> responses = ConcurrentCollections.newConcurrentMap();
+        Map<Long, ResponseException> responseExceptions = ConcurrentCollections.newConcurrentMap();
AtomicReference<IOException> ioe = new AtomicReference<>();
AtomicInteger recreationGuard = new AtomicInteger(0);
AtomicReference<Response> recreationResponse = new AtomicReference<>();
