Skip to content

Commit

Permalink
initial cut at separating partitions from sort keys, accumulo passing
Browse files Browse the repository at this point in the history
tests (#1018)
  • Loading branch information
rfecher committed Jul 26, 2017
1 parent d0c7a59 commit 178a5dd
Show file tree
Hide file tree
Showing 533 changed files with 14,356 additions and 15,069 deletions.
12 changes: 6 additions & 6 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@ env:
# The rest skip the unit tests and run ITs only
- NAME='UT and Publish Docs with Latest ASF Versions' MAVEN_PROFILES='""' BUILD_DOCS=true IT_ONLY=false
- NAME='Accumulo IT on Latest ASF Versions' MAVEN_PROFILES='accumulo-it' BUILD_DOCS=false IT_ONLY=true
- NAME='HBase IT on Latest ASF Versions' MAVEN_PROFILES='hbase-it' BUILD_DOCS=false IT_ONLY=true
- NAME='Cassandra IT on Latest ASF Versions' MAVEN_PROFILES='cassandra-it' BUILD_DOCS=false IT_ONLY=true
- NAME='DynamoDB IT on Latest ASF Versions' MAVEN_PROFILES='dynamodb-it' BUILD_DOCS=false IT_ONLY=true
- NAME='Bigtable IT on Latest ASF Versions' MAVEN_PROFILES='bigtable-it' BUILD_DOCS=false IT_ONLY=true
#- NAME='HBase IT on Latest ASF Versions' MAVEN_PROFILES='hbase-it' BUILD_DOCS=false IT_ONLY=true
#- NAME='Cassandra IT on Latest ASF Versions' MAVEN_PROFILES='cassandra-it' BUILD_DOCS=false IT_ONLY=true
#- NAME='DynamoDB IT on Latest ASF Versions' MAVEN_PROFILES='dynamodb-it' BUILD_DOCS=false IT_ONLY=true
#- NAME='Bigtable IT on Latest ASF Versions' MAVEN_PROFILES='bigtable-it' BUILD_DOCS=false IT_ONLY=true
- NAME='Accumulo IT on Latest CDH Versions' MAVEN_PROFILES='accumulo-it,cloudera' BUILD_DOCS=false IT_ONLY=true
- NAME='Accumulo IT on Latest HDP Versions' MAVEN_PROFILES='accumulo-it,hortonworks' BUILD_DOCS=false IT_ONLY=true
- NAME='Accumulo IT on Older ASF and GeoTools Versions' MAVEN_PROFILES='accumulo-it,compatibility' BUILD_DOCS=false IT_ONLY=true
- NAME='HBase IT on Latest CDH Versions' MAVEN_PROFILES='hbase-it,cloudera' BUILD_DOCS=false IT_ONLY=true
- NAME='HBase IT on Latest HDP Versions' MAVEN_PROFILES='hbase-it,hortonworks' BUILD_DOCS=false IT_ONLY=true
#- NAME='HBase IT on Latest CDH Versions' MAVEN_PROFILES='hbase-it,cloudera' BUILD_DOCS=false IT_ONLY=true
#- NAME='HBase IT on Latest HDP Versions' MAVEN_PROFILES='hbase-it,hortonworks' BUILD_DOCS=false IT_ONLY=true
global:
- secure: "TosKDl5mnt8UKeyWDg65i6cWENR7EorQbFPSvZ5ZfQfAaDAOeIN2OA/zxtRMELeYM82+n+GGXQOt0qPiYqyRlufYJJSUnWiwvI5gm3a8+f58atcU2R2bF9jd81bsL9jCS+JCQxAmzh8FCO6t7DJ4OdoMyMaIR7XjlSlsIJ97dd8="
- secure: "IcwzKevdTSsKK9YERJ/LV81pfDe7Fx7qBxYcy43b0/emsioZJsJV5XSYHfFRIqceMkzp8LFBU8qiZR3cPZPKQoCjaG1QcwDeKQpyczIkMwzWzydhLR5dAzVETbQC9i2hH4sWjVVHW5WU6UUc3gCz5rPyIXFUYVUYxFeMWxHCe8w="
Expand Down
4 changes: 2 additions & 2 deletions .utility/run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ set -ev

if [ "$IT_ONLY" == "true" ]; then
echo -e "Skipping unit tests w/ verify...\n"
mvn -q verify -Dtest=SkipUnitTests -DfailIfNoTests=false -P $MAVEN_PROFILES
mvn -q verify -am -pl test -Dtest=SkipUnitTests -DfailIfNoTests=false -P $MAVEN_PROFILES
else
echo -e "Running unit tests only w/ verify...\n"
mvn -q verify -P $MAVEN_PROFILES
mvn -q verify -am -pl test -P $MAVEN_PROFILES
fi
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@
import mil.nga.giat.geowave.analytic.store.PersistableStore;
import mil.nga.giat.geowave.core.geotime.ingest.SpatialDimensionalityTypeProvider;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.index.StringUtils;
import mil.nga.giat.geowave.core.store.CloseableIterator;
import mil.nga.giat.geowave.core.store.DataStore;
import mil.nga.giat.geowave.core.store.IndexWriter;
Expand All @@ -69,45 +68,45 @@
import mil.nga.giat.geowave.mapreduce.GeoWaveConfiguratorBase;

/**
*
*
* Manages the population of centroids by group id and batch id.
*
*
* Properties:
*
*
* @formatter:off
*
*
* "CentroidManagerGeoWave.Centroid.WrapperFactoryClass" -
* {@link AnalyticItemWrapperFactory} to extract wrap spatial
* objects with Centroid management function
*
*
* "CentroidManagerGeoWave.Centroid.DataTypeId" -> The data type
* ID of the centroid simple feature
*
*
* "CentroidManagerGeoWave.Centroid.IndexId" -> The GeoWave index
* ID of the centroid simple feature
*
*
* "CentroidManagerGeoWave.Global.BatchId" -> Batch ID for
* updates
*
*
* "CentroidManagerGeoWave.Global.Zookeeper" -> Zookeeper URL
*
*
* "CentroidManagerGeoWave.Global.AccumuloInstance" -> Accumulo
* Instance Name
*
*
* "CentroidManagerGeoWave.Global.AccumuloUser" -> Accumulo User
* name
*
*
* "CentroidManagerGeoWave.Global.AccumuloPassword" -> Accumulo
* Password
*
*
* "CentroidManagerGeoWave.Global.AccumuloNamespace" -> Accumulo
* Table Namespace
*
*
* "CentroidManagerGeoWave.Common.AccumuloConnectFactory" ->
* {@link BasicAccumuloOperationsFactory}
*
*
* @formatter:on
*
*
* @param <T>
* The item type used to represent a centroid.
*/
Expand Down Expand Up @@ -153,10 +152,10 @@ public CentroidManagerGeoWave(
this.dataStore = dataStore;
this.indexStore = indexStore;
index = (PrimaryIndex) indexStore.getIndex(new ByteArrayId(
StringUtils.stringToBinary(indexId)));
indexId));
this.adapterStore = adapterStore;
adapter = (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(new ByteArrayId(
StringUtils.stringToBinary(centroidDataTypeId)));
centroidDataTypeId));
}

@SuppressWarnings("unchecked")
Expand Down Expand Up @@ -240,24 +239,24 @@ private void init(
final String indexId = scopedJob.getString(
CentroidParameters.Centroid.INDEX_ID,
new SpatialDimensionalityTypeProvider().createPrimaryIndex().getId().getString());
PersistableStore store = (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
final PersistableStore store = (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
context,
scope,
null);

dataStore = store.getDataStoreOptions().createDataStore();
indexStore = store.getDataStoreOptions().createIndexStore();
index = (PrimaryIndex) indexStore.getIndex(new ByteArrayId(
StringUtils.stringToBinary(indexId)));
indexId));
adapterStore = store.getDataStoreOptions().createAdapterStore();
adapter = (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(new ByteArrayId(
StringUtils.stringToBinary(centroidDataTypeId)));
centroidDataTypeId));
}

/**
* Creates a new centroid based on the old centroid with new coordinates and
* dimension values
*
*
* @param feature
* @param coordinate
* @param extraNames
Expand Down Expand Up @@ -293,7 +292,7 @@ public void delete(
final String[] dataIds )
throws IOException {
final ByteArrayId adapterId = new ByteArrayId(
StringUtils.stringToBinary(centroidDataTypeId));
centroidDataTypeId);
for (final String dataId : dataIds) {

if (dataId != null) {
Expand All @@ -302,9 +301,8 @@ public void delete(
adapterId,
index.getId()),
new DataIdQuery(
adapterId,
new ByteArrayId(
StringUtils.stringToBinary(dataId))));
dataId)));
}
}
}
Expand Down Expand Up @@ -428,15 +426,14 @@ private List<AnalyticItemWrapper<T>> loadCentroids(
public AnalyticItemWrapper<T> getCentroid(
final String dataId ) {
final ByteArrayId adapterId = new ByteArrayId(
StringUtils.stringToBinary(centroidDataTypeId));
centroidDataTypeId);
try (CloseableIterator<T> it = dataStore.query(
new QueryOptions(
adapterId,
index.getId()),
new DataIdQuery(
adapterId,
new ByteArrayId(
StringUtils.stringToBinary(dataId))))) {
dataId)))) {
if (it.hasNext()) {
return centroidFactory.create(it.next());
}
Expand Down Expand Up @@ -563,7 +560,7 @@ public static void setParameters(
@Override
public ByteArrayId getDataTypeId() {
return new ByteArrayId(
StringUtils.stringToBinary(centroidDataTypeId));
centroidDataTypeId);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import mil.nga.giat.geowave.core.geotime.store.query.SpatialQuery;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.index.ByteArrayRange;
import mil.nga.giat.geowave.core.index.QueryRanges;
import mil.nga.giat.geowave.core.store.adapter.AdapterStore;
import mil.nga.giat.geowave.core.store.adapter.DataAdapter;
import mil.nga.giat.geowave.core.store.index.Index;
Expand Down Expand Up @@ -108,11 +109,11 @@ public static PrimaryIndex[] getIndices(
* Method takes in a polygon and generates the corresponding ranges in a
* GeoWave spatial index
*/
protected static List<ByteArrayRange> getGeoWaveRangesForQuery(
protected static QueryRanges getGeoWaveRangesForQuery(
final Polygon polygon ) {

final PrimaryIndex index = new SpatialDimensionalityTypeProvider().createPrimaryIndex();
final List<ByteArrayRange> ranges = DataStoreUtils.constraintsToByteArrayRanges(
final QueryRanges ranges = DataStoreUtils.constraintsToQueryRanges(
new SpatialQuery(
polygon).getIndexConstraints(index.getIndexStrategy()),
index.getIndexStrategy(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import mil.nga.giat.geowave.analytic.PropertyManagement;
import mil.nga.giat.geowave.analytic.store.PersistableStore;
import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions;
import mil.nga.giat.geowave.core.store.cli.remote.options.DataStorePluginOptions;
import mil.nga.giat.geowave.mapreduce.input.GeoWaveInputFormat;

public class InputStoreParameterHelper implements
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import mil.nga.giat.geowave.analytic.PropertyManagement;
import mil.nga.giat.geowave.analytic.store.PersistableStore;
import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions;
import mil.nga.giat.geowave.core.store.cli.remote.options.DataStorePluginOptions;
import mil.nga.giat.geowave.mapreduce.output.GeoWaveOutputFormat;

public class OutputStoreParameterHelper implements
Expand Down

0 comments on commit 178a5dd

Please sign in to comment.