
Commit

added full decomposition, scaling unit cell size back
rfecher committed Sep 26, 2016
1 parent 6194180 commit a08ace8
Showing 4 changed files with 108 additions and 32 deletions.
@@ -4,7 +4,12 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 
+import com.google.uzaygezen.core.CompactHilbertCurve;
+import com.google.uzaygezen.core.MultiDimensionalSpec;
+
 import mil.nga.giat.geowave.core.index.ByteArrayUtils;
 import mil.nga.giat.geowave.core.index.PersistenceUtils;
@@ -13,16 +18,88 @@
 import mil.nga.giat.geowave.core.index.sfc.SpaceFillingCurve;
 import mil.nga.giat.geowave.core.index.sfc.data.MultiDimensionalNumericData;
 
-import com.google.uzaygezen.core.CompactHilbertCurve;
-import com.google.uzaygezen.core.MultiDimensionalSpec;
-
 /***
  * Implementation of a Compact Hilbert space filling curve
  *
  */
 public class HilbertSFC implements
         SpaceFillingCurve
 {
+    private static class QueryCacheKey
+    {
+        private final double[] minsPerDimension;
+        private final double[] maxesPerDimension;
+        private final boolean overInclusiveOnEdge;
+        private final int maxFilteredIndexedRanges;
+
+        public QueryCacheKey(
+                final double[] minsPerDimension,
+                final double[] maxesPerDimension,
+                final boolean overInclusiveOnEdge,
+                final int maxFilteredIndexedRanges ) {
+            this.minsPerDimension = minsPerDimension;
+            this.maxesPerDimension = maxesPerDimension;
+            this.overInclusiveOnEdge = overInclusiveOnEdge;
+            this.maxFilteredIndexedRanges = maxFilteredIndexedRanges;
+        }
+
+        @Override
+        public int hashCode() {
+            final int prime = 31;
+            int result = 1;
+            result = (prime * result) + maxFilteredIndexedRanges;
+            result = (prime * result) + Arrays.hashCode(maxesPerDimension);
+            result = (prime * result) + Arrays.hashCode(minsPerDimension);
+            result = (prime * result) + (overInclusiveOnEdge ? 1231 : 1237);
+            return result;
+        }
+
+        @Override
+        public boolean equals(
+                final Object obj ) {
+            if (this == obj) {
+                return true;
+            }
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            final QueryCacheKey other = (QueryCacheKey) obj;
+            if (maxFilteredIndexedRanges != other.maxFilteredIndexedRanges) {
+                return false;
+            }
+            if (!Arrays.equals(
+                    maxesPerDimension,
+                    other.maxesPerDimension)) {
+                return false;
+            }
+            if (!Arrays.equals(
+                    minsPerDimension,
+                    other.minsPerDimension)) {
+                return false;
+            }
+            if (overInclusiveOnEdge != other.overInclusiveOnEdge) {
+                return false;
+            }
+            return true;
+        }
+    }
+
+    private static final int MAX_CACHED_QUERIES = 500;
+    private final Map<QueryCacheKey, RangeDecomposition> queryDecompositionCache = new LinkedHashMap<QueryCacheKey, RangeDecomposition>(
+            MAX_CACHED_QUERIES + 1,
+            .75F,
+            true) {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public boolean removeEldestEntry(
+                final Map.Entry<QueryCacheKey, RangeDecomposition> eldest ) {
+            return size() > MAX_CACHED_QUERIES;
+        }
+    };
     protected CompactHilbertCurve compactHilbertCurve;
     protected SFCDimensionDefinition[] dimensionDefinitions;
     protected int totalPrecision;
@@ -37,7 +114,7 @@ protected HilbertSFC() {}
     /***
      * Use the SFCFactory.createSpaceFillingCurve method - don't call this
      * constructor directly
      *
      */
     public HilbertSFC(
             final SFCDimensionDefinition[] dimensionDefs ) {
@@ -143,14 +220,26 @@ public RangeDecomposition decomposeRange(
         if (maxFilteredIndexedRanges == -1) {
             maxFilteredIndexedRanges = Integer.MAX_VALUE;
         }
-        return decomposeQueryOperations.decomposeRange(
-                query.getDataPerDimension(),
-                compactHilbertCurve,
-                dimensionDefinitions,
-                totalPrecision,
-                maxFilteredIndexedRanges,
-                REMOVE_VACUUM,
-                overInclusiveOnEdge);
+        final QueryCacheKey key = new QueryCacheKey(
+                query.getMinValuesPerDimension(),
+                query.getMaxValuesPerDimension(),
+                overInclusiveOnEdge,
+                maxFilteredIndexedRanges);
+        RangeDecomposition rangeDecomp = queryDecompositionCache.get(key);
+        if (rangeDecomp == null) {
+            rangeDecomp = decomposeQueryOperations.decomposeRange(
+                    query.getDataPerDimension(),
+                    compactHilbertCurve,
+                    dimensionDefinitions,
+                    totalPrecision,
+                    maxFilteredIndexedRanges,
+                    REMOVE_VACUUM,
+                    overInclusiveOnEdge);
+            queryDecompositionCache.put(
+                    key,
+                    rangeDecomp);
+        }
+        return rangeDecomp;
     }
 
     protected static byte[] fitExpectedByteCount(
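
The queryDecompositionCache added above is the standard access-ordered LinkedHashMap LRU idiom: passing true as the third constructor argument makes iteration order follow access recency, and overriding removeEldestEntry caps the map at MAX_CACHED_QUERIES entries. A self-contained sketch of the same pattern, using toy string keys and a small bound rather than the QueryCacheKey/RangeDecomposition types from the diff:

import java.util.LinkedHashMap;
import java.util.Map;

public class LruCacheDemo {
    private static final int MAX_ENTRIES = 3; // small bound for the demo

    public static void main(final String[] args) {
        // true = access order: each get() moves the entry to the back,
        // so the eldest entry is always the least recently used one
        final Map<String, String> cache = new LinkedHashMap<String, String>(
                MAX_ENTRIES + 1,
                .75F,
                true) {
            private static final long serialVersionUID = 1L;

            @Override
            protected boolean removeEldestEntry(
                    final Map.Entry<String, String> eldest ) {
                return size() > MAX_ENTRIES;
            }
        };
        cache.put("a", "1");
        cache.put("b", "2");
        cache.put("c", "3");
        cache.get("a"); // touch "a" so "b" is now the eldest
        cache.put("d", "4"); // exceeds the bound, evicting "b"
        System.out.println(cache.keySet()); // prints [c, a, d]
    }
}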
@@ -46,7 +46,7 @@ public class PrimitiveHilbertSFCOperations implements
 {
     protected final static long UNIT_CELL_SIZE = (long) Math.pow(
             2,
-            20);
+            19);
     protected long[] binsPerDimension;
 
     protected long minHilbertValue;
@@ -46,7 +46,7 @@ public class UnboundedHilbertSFCOperations implements
     protected final static BigInteger UNIT_CELL_SIZE = BigDecimal.valueOf(
             Math.pow(
                     2,
-                    20)).toBigInteger();
+                    19)).toBigInteger();
     protected BigDecimal[] binsPerDimension;
     protected BigInteger minHilbertValue;
     protected BigInteger maxHilbertValue;
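
The same constant changes in both the primitive and unbounded operations classes, keeping the two code paths consistent. Dropping UNIT_CELL_SIZE from 2^20 to 2^19 halves the unit-cell count, which is presumably the "scaling unit cell size back" half of the commit message. The arithmetic, for reference (plain Java, nothing read from GeoWave):

public class UnitCellSizeCheck {
    public static void main(final String[] args) {
        final long before = (long) Math.pow(2, 20);
        final long after = (long) Math.pow(2, 19);
        System.out.println(before); // 1048576
        System.out.println(after); // 524288
        System.out.println(before / after); // 2: half as many unit cells
    }
}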
@@ -121,23 +121,10 @@ protected static List<ByteArrayRange> getQueryRanges(
             maxRangeDecompositionPerBin = (int) Math.ceil((double) maxRanges / (double) binnedQueries.length);
         }
         for (final BinnedNumericDataset binnedQuery : binnedQueries) {
-            final RangeDecomposition rangeDecomp;
-            if (binnedQuery.isFullExtent()) {
-                rangeDecomp = new RangeDecomposition(
-                        new ByteArrayRange[] {
-                            new ByteArrayRange(
-                                    new ByteArrayId(
-                                            new byte[] {}),
-                                    new ByteArrayId(
-                                            new byte[] {}))
-                        });
-            }
-            else {
-                rangeDecomp = sfc.decomposeRange(
-                        binnedQuery,
-                        true,
-                        maxRangeDecompositionPerBin);
-            }
+            final RangeDecomposition rangeDecomp = sfc.decomposeRange(
+                    binnedQuery,
+                    true,
+                    maxRangeDecompositionPerBin);
             final byte[] tierAndBinId = ByteArrayUtils.combineArrays(
                     new byte[] {
                         tier
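
With the isFullExtent shortcut removed, every bin now takes the sfc.decomposeRange path, bounded by maxRangeDecompositionPerBin; this appears to be the "full decomposition" half of the commit message. The surviving context line above shows how that per-bin budget is derived, and a quick check of the arithmetic with hypothetical values (10 total ranges across 3 bins):

public class RangeBudgetCheck {
    public static void main(final String[] args) {
        final int maxRanges = 10; // hypothetical overall budget
        final int bins = 3; // hypothetical bin count
        // same expression as in getQueryRanges above
        final int maxRangeDecompositionPerBin = (int) Math.ceil((double) maxRanges / (double) bins);
        System.out.println(maxRangeDecompositionPerBin); // prints 4
    }
}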
