Remove hppc from search tests (#85468)
This commit removes uses of hppc collections in search tests that are
purely for local record keeping.

relates #84735
rjernst committed Mar 30, 2022
1 parent 898d849 commit 53fb1d8
Showing 3 changed files with 15 additions and 19 deletions.
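The same replacement pattern runs through all three files: an hppc primitive collection becomes the equivalent boxed JDK collection, and where a primitive array is still needed the boxed set is unboxed through a stream. A minimal, self-contained sketch of that pattern (the values and class name here are hypothetical, not code from the commit):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class HppcReplacementSketch {
    public static void main(String[] args) {
        // Before (hppc): LongHashSet stored primitive longs and toArray() returned long[] directly.
        //     LongHashSet ordinals = new LongHashSet();
        //     ordinals.add(42L);
        //     long[] ords = ordinals.toArray();

        // After (JDK): a boxed Set<Long>; the extra boxing is acceptable for the
        // purely test-local record keeping these call sites do.
        Set<Long> ordinals = new HashSet<>();
        ordinals.add(42L);
        ordinals.add(7L);
        long[] ords = ordinals.stream().mapToLong(Long::longValue).toArray();
        Arrays.sort(ords);
        System.out.println(Arrays.toString(ords)); // prints [7, 42]
    }
}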
@@ -7,8 +7,6 @@
  */
 package org.elasticsearch.search.aggregations.bucket.range;
 
-import com.carrotsearch.hppc.LongHashSet;
-
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues;
@@ -93,12 +91,12 @@ protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws I
 final int[] expectedCounts = new int[ranges.length];
 final int maxDoc = randomIntBetween(5, 10);
 for (int doc = 0; doc < maxDoc; ++doc) {
-    LongHashSet ordinalSet = new LongHashSet();
+    Set<Long> ordinalSet = new HashSet<>();
     final int numValues = randomInt(maxNumValuesPerDoc);
     while (ordinalSet.size() < numValues) {
-        ordinalSet.add(random().nextInt(terms.length));
+        ordinalSet.add(random().nextLong(terms.length));
     }
-    final long[] ords = ordinalSet.toArray();
+    final long[] ords = ordinalSet.stream().mapToLong(Long::longValue).toArray();
     Arrays.sort(ords);
     values.ords = ords;

@@ -194,12 +192,12 @@ protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws I
 final int[] expectedCounts = new int[ranges.length];
 final int maxDoc = randomIntBetween(5, 10);
 for (int doc = 0; doc < maxDoc; ++doc) {
-    LongHashSet ordinalSet = new LongHashSet();
+    Set<Long> ordinalSet = new HashSet<>();
     final int numValues = randomInt(maxNumValuesPerDoc);
     while (ordinalSet.size() < numValues) {
-        ordinalSet.add(random().nextInt(terms.length));
+        ordinalSet.add(random().nextLong(terms.length));
     }
-    final long[] ords = ordinalSet.toArray();
+    final long[] ords = ordinalSet.stream().mapToLong(Long::longValue).toArray();
     Arrays.sort(ords);
     values.ords = ords;

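One detail worth noting in the range-aggregator test above: with a boxed Set<Long>, the old ordinalSet.add(random().nextInt(terms.length)) call would no longer compile, because an int argument boxes to Integer rather than Long; hence the switch to the bounded nextLong(bound), which java.util.Random provides via RandomGenerator on Java 17 and later. A small stand-alone illustration (the terms array and class name here are hypothetical stand-ins):

import java.util.HashSet;
import java.util.Random;
import java.util.Set;

class BoundedNextLongSketch {
    public static void main(String[] args) {
        String[] terms = { "a", "b", "c" };   // hypothetical stand-in for the test's terms array
        Random random = new Random();

        Set<Long> ordinalSet = new HashSet<>();
        // ordinalSet.add(random.nextInt(terms.length));  // would not compile: int boxes to Integer, not Long
        ordinalSet.add(random.nextLong(terms.length));     // bounded nextLong (Java 17+): a long in [0, terms.length)
        System.out.println(ordinalSet);
    }
}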
@@ -8,11 +8,6 @@
 
 package org.elasticsearch.search.aggregations.metrics;
 
-import com.carrotsearch.hppc.ObjectIntHashMap;
-import com.carrotsearch.hppc.ObjectIntMap;
-import com.carrotsearch.hppc.ObjectObjectHashMap;
-import com.carrotsearch.hppc.ObjectObjectMap;
-
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.Strings;
@@ -30,7 +25,9 @@
 import org.elasticsearch.xcontent.XContentFactory;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
@@ -55,8 +52,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
 protected static GeoPoint[] singleValues, multiValues;
 protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid,
     unmappedCentroid;
-protected static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
-protected static ObjectObjectMap<String, GeoPoint> expectedCentroidsForGeoHash = null;
+protected static Map<String, Integer> expectedDocCountsForGeoHash = null;
+protected static Map<String, GeoPoint> expectedCentroidsForGeoHash = null;
 protected static final double GEOHASH_TOLERANCE = 1E-5D;
 
 @Override
@@ -85,8 +82,8 @@ public void setupSuiteScopeCluster() throws Exception {
 
 numDocs = randomIntBetween(6, 20);
 numUniqueGeoPoints = randomIntBetween(1, numDocs);
-expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
-expectedCentroidsForGeoHash = new ObjectObjectHashMap<>(numDocs * 2);
+expectedDocCountsForGeoHash = new HashMap<>(numDocs * 2);
+expectedCentroidsForGeoHash = new HashMap<>(numDocs * 2);
 
 singleValues = new GeoPoint[numUniqueGeoPoints];
 for (int i = 0; i < singleValues.length; i++) {
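The lines of AbstractGeoTestCase that actually fill these maps are collapsed above, so the following is only a sketch of how a plain HashMap can cover the counting role ObjectIntHashMap played, assuming a merge-based increment rather than the test's exact code (geohash strings are made up for the example):

import java.util.HashMap;
import java.util.Map;

class GeoHashCountSketch {
    public static void main(String[] args) {
        // Boxed stand-in for hppc's ObjectIntHashMap<String>: doc counts per geohash cell.
        Map<String, Integer> expectedDocCountsForGeoHash = new HashMap<>();

        String[] geoHashes = { "u09tv", "u09tv", "u09wj" };   // hypothetical sample cells
        for (String hash : geoHashes) {
            // hppc exposes addTo(key, 1); with a HashMap, merge() gives the same insert-or-increment.
            expectedDocCountsForGeoHash.merge(hash, 1, Integer::sum);
        }
        System.out.println(expectedDocCountsForGeoHash);   // u09tv -> 2, u09wj -> 1 (iteration order unspecified)
    }
}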
@@ -9,7 +9,6 @@
 package org.elasticsearch.search.aggregations.metrics;
 
 import com.carrotsearch.hppc.BitMixer;
-import com.carrotsearch.hppc.IntHashSet;
 
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
@@ -21,6 +20,8 @@
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
 import static org.elasticsearch.search.aggregations.metrics.AbstractCardinalityAlgorithm.MAX_PRECISION;
@@ -60,7 +61,7 @@ public void testAccuracy() {
 final int numValues = randomIntBetween(1, 100000);
 final int maxValue = randomIntBetween(1, randomBoolean() ? 1000 : 100000);
 final int p = randomIntBetween(14, MAX_PRECISION);
-IntHashSet set = new IntHashSet();
+Set<Integer> set = new HashSet<>();
 HyperLogLogPlusPlus e = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 1);
 for (int i = 0; i < numValues; ++i) {
     final int n = randomInt(maxValue);
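In the HyperLogLogPlusPlus accuracy test, the visible lines suggest the set simply records the exact distinct values so the estimator's cardinality can be checked against set.size(); the loop body that does the hashing and collecting is collapsed above. A JDK-only sketch of that role, with the hashing/collect step indicated only in a comment since it needs the Elasticsearch test classpath (values and class name are hypothetical):

import java.util.HashSet;
import java.util.Random;
import java.util.Set;

class ExactCardinalityTrackingSketch {
    public static void main(String[] args) {
        Random random = new Random();
        int numValues = 1_000;
        int maxValue = 100;

        // Boxed stand-in for hppc's IntHashSet: records which values were actually generated,
        // so set.size() is the exact distinct count the HLL++ estimate is compared against.
        Set<Integer> set = new HashSet<>();
        for (int i = 0; i < numValues; ++i) {
            int n = random.nextInt(maxValue + 1);
            set.add(n);
            // In the test, the same n would also be hashed (e.g. BitMixer.mix64(n)) and
            // collected into the HyperLogLogPlusPlus instance at this point.
        }
        System.out.println("exact distinct values: " + set.size());
    }
}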
