Skip to content

Commit

Permalink
Added tests and security policies
Browse files Browse the repository at this point in the history
Signed-off-by: Ketan Verma <ketan9495@gmail.com>
  • Loading branch information
ketanv3 committed Jul 20, 2023
1 parent f3ee896 commit 67ee51c
Show file tree
Hide file tree
Showing 7 changed files with 162 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
import org.opensearch.common.lease.Releasables;
import org.opensearch.core.common.util.ByteArray;

import java.security.AccessController;
import java.security.PrivilegedAction;

/**
* Specialized hash table implementation that maps a {@link BytesRef} key to a long ordinal.
*
Expand All @@ -28,7 +31,9 @@
* @opensearch.internal
*/
public class CompactBytesRefHash implements Releasable {
private static final LongHashFunction XX3 = LongHashFunction.xx3(System.nanoTime());
private static final LongHashFunction XX3 = AccessController.doPrivileged(
(PrivilegedAction<LongHashFunction>) () -> LongHashFunction.xx3(System.nanoTime())
);

private static final long MAX_CAPACITY = 1L << 32;
private static final long DEFAULT_INITIAL_CAPACITY = 32;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
import org.opensearch.common.lease.Releasables;
import org.opensearch.core.common.util.ByteArray;

import java.security.AccessController;
import java.security.PrivilegedAction;

/**
* Specialized hash table implementation that maps a {@link BytesRef} key to a long ordinal.
*
Expand All @@ -29,7 +32,9 @@
* @opensearch.internal
*/
public class ReorganizingBytesRefHash implements Releasable {
private static final LongHashFunction XX3 = LongHashFunction.xx3(System.nanoTime());
private static final LongHashFunction XX3 = AccessController.doPrivileged(
(PrivilegedAction<LongHashFunction>) () -> LongHashFunction.xx3(System.nanoTime())
);

private static final long MAX_CAPACITY = 1L << 32;
private static final long DEFAULT_INITIAL_CAPACITY = 32;
Expand Down Expand Up @@ -278,6 +283,14 @@ public void close() {
Releasables.close(table, offsets, keys);
}

/**
 * Returns the underlying hash table.
 * <p>
 * Package-private and visible only for unit tests (e.g. verifying the
 * probe-sequence lengths encoded in each occupied slot); production code
 * must not depend on the internal table layout.
 */
LongArray getTable() {
    return table;
}

/**
* Hasher calculates the hash of a {@link BytesRef} key.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.opensearch.common.util.BytesRefHash;
import org.opensearch.common.lease.Releasable;
import org.opensearch.common.lease.Releasables;
import org.opensearch.common.util.CompactBytesRefHash;
import org.opensearch.search.aggregations.CardinalityUpperBound;

/**
Expand Down Expand Up @@ -128,10 +129,10 @@ public void readValue(BytesRef dest) {}
* @opensearch.internal
*/
private static class FromSingle extends BytesKeyedBucketOrds {
private final BytesRefHash ords;
private final CompactBytesRefHash ords;

private FromSingle(BigArrays bigArrays) {
ords = new BytesRefHash(1, bigArrays);
ords = new CompactBytesRefHash(bigArrays);
}

@Override
Expand Down
10 changes: 10 additions & 0 deletions server/src/main/resources/org/opensearch/bootstrap/security.policy
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,10 @@ grant codeBase "${codebase.opensearch}" {
permission java.lang.RuntimePermission "setContextClassLoader";
// needed for SPI class loading
permission java.lang.RuntimePermission "accessDeclaredMembers";

// needed for zero-allocation-hashing
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

//// Very special jar permissions:
Expand Down Expand Up @@ -85,6 +89,12 @@ grant codeBase "${codebase.zstd-jni}" {
permission java.lang.RuntimePermission "loadLibrary.*";
};

grant codeBase "${codebase.zero-allocation-hashing}" {
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.RuntimePermission "accessDeclaredMembers";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

//// Everything else:

grant {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,5 +156,6 @@ grant {
permission java.lang.RuntimePermission "accessDeclaredMembers";
permission java.lang.RuntimePermission "reflectionFactoryAccess";
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.common.util;

import net.openhft.hashing.LongHashFunction;
import org.apache.lucene.util.BytesRef;
import org.opensearch.test.OpenSearchTestCase;

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

public class CompactBytesRefHashTests extends OpenSearchTestCase {

    /**
     * Randomized fuzz test: mirrors the hash's contents in a plain
     * {@link HashMap} and checks that add/find/get/size all agree with it.
     */
    public void testFuzzy() {
        LongHashFunction hasher = LongHashFunction.xx3(randomLong());
        Map<BytesRef, Long> expected = new HashMap<>();
        BytesRef[] keys = Stream.generate(() -> new BytesRef(randomAlphaOfLength(20))).limit(1000).toArray(BytesRef[]::new);

        try (
            CompactBytesRefHash h = new CompactBytesRefHash(
                randomIntBetween(1, 100), // random capacity
                0.6f + randomFloat() * 0.39f, // random load factor to verify collision resolution
                key -> hasher.hashBytes(key.bytes, key.offset, key.length),
                BigArrays.NON_RECYCLING_INSTANCE
            )
        ) {
            // Exercise "add" and "find": each key is inserted once and then
            // re-probed many times over repeated passes through the key set.
            for (int iteration = 0; iteration < keys.length * 10; iteration++) {
                BytesRef key = keys[iteration % keys.length];
                Long knownOrdinal = expected.get(key);
                if (knownOrdinal == null) {
                    // First sighting: "find" must miss, then "add" assigns the next ordinal.
                    assertEquals(-1, h.find(key));
                    long ordinal = expected.size();
                    expected.put(key, ordinal);
                    assertEquals(ordinal, h.add(key));
                } else {
                    // Duplicate insertion reports the existing ordinal as (-1 - ordinal).
                    assertEquals(-1 - knownOrdinal, h.add(key));
                    assertEquals(knownOrdinal.longValue(), h.find(key));
                }
            }

            // Every assigned ordinal must map back to its original key.
            BytesRef reusable = new BytesRef();
            for (Map.Entry<BytesRef, Long> entry : expected.entrySet()) {
                assertEquals(entry.getKey(), h.get(entry.getValue(), reusable));
            }

            // The reported size must equal the number of distinct keys.
            assertEquals(expected.size(), h.size());
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.common.util;

import net.openhft.hashing.LongHashFunction;
import org.apache.lucene.util.BytesRef;
import org.opensearch.test.OpenSearchTestCase;

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

public class ReorganizingBytesRefHashTests extends OpenSearchTestCase {

    /**
     * Randomized fuzz test: mirrors the hash's contents in a plain
     * {@link HashMap}, checks add/find/get/size against it, and then
     * validates the probe-sequence length (PSL) stored in every slot.
     */
    public void testFuzzy() {
        LongHashFunction hasher = LongHashFunction.xx3(randomLong());
        Map<BytesRef, Long> expected = new HashMap<>();
        BytesRef[] keys = Stream.generate(() -> new BytesRef(randomAlphaOfLength(20))).limit(1000).toArray(BytesRef[]::new);

        try (
            ReorganizingBytesRefHash h = new ReorganizingBytesRefHash(
                randomIntBetween(1, 100), // random capacity
                0.6f + randomFloat() * 0.39f, // random load factor to verify collision resolution
                key -> hasher.hashBytes(key.bytes, key.offset, key.length),
                BigArrays.NON_RECYCLING_INSTANCE
            )
        ) {
            // Exercise "add" and "find": each key is inserted once and then
            // re-probed many times over repeated passes through the key set.
            for (int iteration = 0; iteration < keys.length * 10; iteration++) {
                BytesRef key = keys[iteration % keys.length];
                Long knownOrdinal = expected.get(key);
                if (knownOrdinal == null) {
                    // First sighting: "find" must miss, then "add" assigns the next ordinal.
                    assertEquals(-1, h.find(key));
                    long ordinal = expected.size();
                    expected.put(key, ordinal);
                    assertEquals(ordinal, h.add(key));
                } else {
                    // Duplicate insertion reports the existing ordinal as (-1 - ordinal).
                    assertEquals(-1 - knownOrdinal, h.add(key));
                    assertEquals(knownOrdinal.longValue(), h.find(key));
                }
            }

            // Every assigned ordinal must map back to its original key.
            BytesRef reusable = new BytesRef();
            for (Map.Entry<BytesRef, Long> entry : expected.entrySet()) {
                assertEquals(entry.getKey(), h.get(entry.getValue(), reusable));
            }

            // The reported size must equal the number of distinct keys.
            assertEquals(expected.size(), h.size());

            // Validate the PSL recorded in the top 16 bits of each occupied
            // slot: it must equal (slot index - home index) mod capacity,
            // where the home index is derived from the key's hash.
            long capacity = h.getTable().size();
            long mask = capacity - 1;
            for (long slot = 0; slot < capacity; slot++) {
                long value = h.getTable().get(slot);
                if (value == -1) {
                    continue; // empty slot
                }
                BytesRef key = h.get((int) value, reusable);
                long homeSlot = hasher.hashBytes(key.bytes, key.offset, key.length) & mask;
                assertEquals((capacity + slot - homeSlot) & mask, value >>> 48);
            }
        }
    }
}

0 comments on commit 67ee51c

Please sign in to comment.