Skip to content

Commit

Permalink
tweaks to maven munge settings
Browse files Browse the repository at this point in the history
  • Loading branch information
chrisbennight committed Jun 17, 2015
1 parent e46e0fb commit 6cfc6bd
Show file tree
Hide file tree
Showing 13 changed files with 234 additions and 131 deletions.
18 changes: 8 additions & 10 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@ jdk:
- oraclejdk7
env:
matrix:
- ACCUMULO_VERSION='1.6.2' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0' GEOTOOLS_VERSION='12.2' GEOSERVER_VERSION='2.6.2' PLATFORM_VERSION='""'
- ACCUMULO_VERSION='1.6.0-cdh5.1.4' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0-cdh5.4.0' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='cloudera'
- ACCUMULO_VERSION='1.7.0' ACCUMULO_API='1.7' HADOOP_VERSION='2.6.0' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='""'
- ACCUMULO_VERSION='1.6.1.2.2.4.0-2633' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0.2.2.4.0-2633' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='hortonworks'
- ACCUMULO_VERSION='1.6.2' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0' GEOTOOLS_VERSION='12.2' GEOSERVER_VERSION='2.6.2' PLATFORM_VERSION='""'
- ACCUMULO_VERSION='1.6.0-cdh5.1.4' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0-cdh5.4.0' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='cloudera'
- ACCUMULO_VERSION='1.7.0' ACCUMULO_API='1.7' HADOOP_VERSION='2.6.0' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='""'
- ACCUMULO_VERSION='1.6.1.2.2.4.0-2633' ACCUMULO_API='1.6' HADOOP_VERSION='2.6.0.2.2.4.0-2633' GEOTOOLS_VERSION='13.1' GEOSERVER_VERSION='2.7.1' PLATFORM_VERSION='hortonworks'
global:
- secure: "TosKDl5mnt8UKeyWDg65i6cWENR7EorQbFPSvZ5ZfQfAaDAOeIN2OA/zxtRMELeYM82+n+GGXQOt0qPiYqyRlufYJJSUnWiwvI5gm3a8+f58atcU2R2bF9jd81bsL9jCS+JCQxAmzh8FCO6t7DJ4OdoMyMaIR7XjlSlsIJ97dd8="
- secure: "IcwzKevdTSsKK9YERJ/LV81pfDe7Fx7qBxYcy43b0/emsioZJsJV5XSYHfFRIqceMkzp8LFBU8qiZR3cPZPKQoCjaG1QcwDeKQpyczIkMwzWzydhLR5dAzVETbQC9i2hH4sWjVVHW5WU6UUc3gCz5rPyIXFUYVUYxFeMWxHCe8w="
Expand All @@ -16,8 +16,8 @@ env:
cache:
directories:
- $HOME/.m2
install: "mvn -q clean install javadoc:aggregate -Dfindbugs.skip -Daccumulo.version=${ACCUMULO_VERSION} -DACCUMULO_API_${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -DskipITs=true -DskipTests=true -P ${PLATFORM_VERSION}; .utility/build-docs-site.sh"
script: "mvn -q -T 2C verify -Daccumulo.version=${ACCUMULO_VERSION} -DACCUMULO_API_${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -P ${PLATFORM_VERSION}"
install: "mvn -q clean install javadoc:aggregate -Dfindbugs.skip -Daccumulo.version=${ACCUMULO_VERSION} -Daccumulo.api=${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -DskipITs=true -DskipTests=true -P ${PLATFORM_VERSION}; .utility/build-docs-site.sh"
script: "mvn -q -T 2C verify -Daccumulo.version=${ACCUMULO_VERSION} -Daccumulo.api=${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -P ${PLATFORM_VERSION}"
before_install:
- export MAVEN_OPTS="-Xmx512m -XX:MaxPermSize=192m"
- chmod +x .utility/push-javadoc-to-gh-pages.sh
Expand All @@ -29,7 +29,7 @@ before_install:
after_success:
- .utility/changelog-generator.sh
- .utility/push-javadoc-to-gh-pages.sh
- cd test; mvn coveralls:report -Daccumulo.version=${ACCUMULO_VERSION} -DACCUMULO_API_${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -P ${PLATFORM_VERSION}
- cd test; mvn coveralls:report -Daccumulo.version=${ACCUMULO_VERSION} -Daccumulo.api=${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -P ${PLATFORM_VERSION}

addons:
apt_packages:
Expand All @@ -40,7 +40,5 @@ addons:
description: Build submitted via Travis CI
notification_email: GeoWave@nga.mil
build_command_prepend: mvn clean
build_command: mvn clean compile -Dfindbugs.skip -Daccumulo.version=${ACCUMULO_VERSION} -DACCUMULO_API_${ACCUMULO_API}
-Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION}
-DskipITs=true -DskipTests=true -P ${PLATFORM_VERSION}
build_command: mvn clean compile -Dfindbugs.skip -Daccumulo.version=${ACCUMULO_VERSION} -Daccumulo.api=${ACCUMULO_API} -Dhadoop.version=${HADOOP_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -DskipITs=true -DskipTests=true -P ${PLATFORM_VERSION}
branch_pattern: coverity_scan
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@
import com.vividsolutions.jts.geom.Envelope;

abstract public class BoundingBoxDataStatistics<T> extends
AbstractDataStatistics<T> {
public final static ByteArrayId STATS_ID = new ByteArrayId("BOUNDING_BOX");
AbstractDataStatistics<T>
{
public final static ByteArrayId STATS_ID = new ByteArrayId(
"BOUNDING_BOX");

protected double minX = Double.MAX_VALUE;
protected double minY = Double.MAX_VALUE;
Expand All @@ -30,18 +32,23 @@ protected BoundingBoxDataStatistics() {
super();
}

public BoundingBoxDataStatistics(final ByteArrayId dataAdapterId) {
super(dataAdapterId, STATS_ID);
public BoundingBoxDataStatistics(
final ByteArrayId dataAdapterId ) {
super(
dataAdapterId,
STATS_ID);
}

public BoundingBoxDataStatistics(final ByteArrayId dataAdapterId,
final ByteArrayId staticticsId) {
super(dataAdapterId, staticticsId);
public BoundingBoxDataStatistics(
final ByteArrayId dataAdapterId,
final ByteArrayId staticticsId ) {
super(
dataAdapterId,
staticticsId);
}

public boolean isSet() {
if ((minX == Double.MAX_VALUE) || (minY == Double.MAX_VALUE)
|| (maxX == -Double.MAX_VALUE) || (maxY == -Double.MAX_VALUE)) {
if ((minX == Double.MAX_VALUE) || (minY == Double.MAX_VALUE) || (maxX == -Double.MAX_VALUE) || (maxY == -Double.MAX_VALUE)) {
return false;
}
return true;
Expand Down Expand Up @@ -82,7 +89,8 @@ public byte[] toBinary() {
}

@Override
public void fromBinary(final byte[] bytes) {
public void fromBinary(
final byte[] bytes ) {
final ByteBuffer buffer = super.binaryBuffer(bytes);
minX = buffer.getDouble();
minY = buffer.getDouble();
Expand All @@ -91,59 +99,99 @@ public void fromBinary(final byte[] bytes) {
}

@Override
public void entryIngested(final DataStoreEntryInfo entryInfo, final T entry) {
public void entryIngested(
final DataStoreEntryInfo entryInfo,
final T entry ) {
final Envelope env = getEnvelope(entry);
if (env != null) {
minX = Math.min(minX, env.getMinX());
minY = Math.min(minY, env.getMinY());
maxX = Math.max(maxX, env.getMaxX());
maxY = Math.max(maxY, env.getMaxY());
minX = Math.min(
minX,
env.getMinX());
minY = Math.min(
minY,
env.getMinY());
maxX = Math.max(
maxX,
env.getMaxX());
maxY = Math.max(
maxY,
env.getMaxY());
}
}

public Constraints getConstraints() {
// Create a NumericRange object using the x axis
final NumericRange rangeLongitude = new NumericRange(minX, maxX);
final NumericRange rangeLongitude = new NumericRange(
minX,
maxX);

// Create a NumericRange object using the y axis
final NumericRange rangeLatitude = new NumericRange(minY, maxY);
final NumericRange rangeLatitude = new NumericRange(
minY,
maxY);

final Map<Class<? extends NumericDimensionDefinition>, ConstraintData> constraintsPerDimension = new HashMap<Class<? extends NumericDimensionDefinition>, ConstraintData>();
// Create and return a new IndexRange array with an x and y axis
// range
constraintsPerDimension.put(LongitudeDefinition.class,
new ConstraintData(rangeLongitude, true));
constraintsPerDimension.put(LatitudeDefinition.class,
new ConstraintData(rangeLatitude, true));
return new Constraints(constraintsPerDimension);
}

abstract protected Envelope getEnvelope(final T entry);
constraintsPerDimension.put(
LongitudeDefinition.class,
new ConstraintData(
rangeLongitude,
true));
constraintsPerDimension.put(
LatitudeDefinition.class,
new ConstraintData(
rangeLatitude,
true));
return new Constraints(
constraintsPerDimension);
}

abstract protected Envelope getEnvelope(
final T entry );

@Override
public void merge(final Mergeable statistics) {
if ((statistics != null)
&& (statistics instanceof BoundingBoxDataStatistics)) {
public void merge(
final Mergeable statistics ) {
if ((statistics != null) && (statistics instanceof BoundingBoxDataStatistics)) {
final BoundingBoxDataStatistics<T> bboxStats = (BoundingBoxDataStatistics<T>) statistics;
if (bboxStats.isSet()) {
minX = Math.min(minX, bboxStats.minX);
minY = Math.min(minY, bboxStats.minY);
maxX = Math.max(maxX, bboxStats.maxX);
maxY = Math.max(maxY, bboxStats.maxY);
minX = Math.min(
minX,
bboxStats.minX);
minY = Math.min(
minY,
bboxStats.minY);
maxX = Math.max(
maxX,
bboxStats.maxX);
maxY = Math.max(
maxY,
bboxStats.maxY);
}
}
}

public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("bbox[adapter=").append(
buffer.append(
"bbox[adapter=").append(
super.getDataAdapterId().getString());
if (isSet()) {
buffer.append(", minX=").append(minX);
buffer.append(", maxX=").append(maxX);
buffer.append(", minY=").append(minY);
buffer.append(", maxY=").append(maxY);
} else {
buffer.append(
", minX=").append(
minX);
buffer.append(
", maxX=").append(
maxX);
buffer.append(
", minY=").append(
minY);
buffer.append(
", maxY=").append(
maxY);
}
else {
buffer.append(", No Values");
}
buffer.append("]");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
import mil.nga.giat.geowave.core.store.DataStoreEntryInfo;

abstract public class NumericRangeDataStatistics<T> extends
AbstractDataStatistics<T> {
AbstractDataStatistics<T>
{

private double min = Double.MAX_VALUE;
private double max = -Double.MAX_VALUE;
Expand All @@ -17,9 +18,12 @@ protected NumericRangeDataStatistics() {
super();
}

public NumericRangeDataStatistics(final ByteArrayId dataAdapterId,
final ByteArrayId statisticsId) {
super(dataAdapterId, statisticsId);
public NumericRangeDataStatistics(
final ByteArrayId dataAdapterId,
final ByteArrayId statisticsId ) {
super(
dataAdapterId,
statisticsId);
}

public boolean isSet() {
Expand Down Expand Up @@ -50,42 +54,59 @@ public byte[] toBinary() {
}

@Override
public void fromBinary(final byte[] bytes) {
public void fromBinary(
final byte[] bytes ) {
final ByteBuffer buffer = super.binaryBuffer(bytes);
min = buffer.getDouble();
max = buffer.getDouble();
}

@Override
public void entryIngested(final DataStoreEntryInfo entryInfo, final T entry) {
public void entryIngested(
final DataStoreEntryInfo entryInfo,
final T entry ) {
final NumericRange range = getRange(entry);
if (range != null) {
min = Math.min(min, range.getMin());
max = Math.max(max, range.getMax());
min = Math.min(
min,
range.getMin());
max = Math.max(
max,
range.getMax());
}
}

abstract protected NumericRange getRange(final T entry);
abstract protected NumericRange getRange(
final T entry );

@Override
public void merge(final Mergeable statistics) {
if ((statistics != null)
&& (statistics instanceof NumericRangeDataStatistics)) {
public void merge(
final Mergeable statistics ) {
if ((statistics != null) && (statistics instanceof NumericRangeDataStatistics)) {
final NumericRangeDataStatistics<T> stats = (NumericRangeDataStatistics<T>) statistics;
if (stats.isSet()) {
min = Math.min(min, stats.getMin());
max = Math.max(max, stats.getMax());
min = Math.min(
min,
stats.getMin());
max = Math.max(
max,
stats.getMax());
}
}
}

public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("range[adapter=").append(
buffer.append(
"range[adapter=").append(
super.getDataAdapterId().getString());
if (isSet()) {
buffer.append(", min=").append(getMin());
buffer.append(", max=").append(getMax());
buffer.append(
", min=").append(
getMin());
buffer.append(
", max=").append(
getMax());
}
else {
buffer.append(", No Values");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
* This data adapter will handle all reading/writing concerns for storing and
* retrieving GeoTools SimpleFeature objects to and from a GeoWave persistent
* store in Accumulo.
*
*
* If the implementor needs to write rows with particular visibility, this can
* be done by providing a FieldVisibilityHandler to a constructor or a
* VisibilityManagement to a constructor. When using VisibilityManagement, the
Expand All @@ -61,25 +61,25 @@
* attribute that contains the visibility meta-data.
* persistedType.getDescriptor("someAttributeName").getUserData().put(
* "visibility", Boolean.TRUE)
*
*
*
*
* The adapter will use the SimpleFeature's default geometry for spatial
* indexing.
*
*
* The adapter will use the first temporal attribute (a Calendar or Date object)
* as the timestamp of a temporal index.
*
*
* If the feature type contains a UserData property 'time' for a specific time
* attribute with Boolean.TRUE, then the attribute is used as the timestamp of a
* temporal index.
*
*
* If the feature type contains UserData properties 'start' and 'end' for two
* different time attributes with value Boolean.TRUE, then the attributes are
* used for a range index.
*
*
* If the feature type contains a UserData property 'time' for *all* time
* attributes with Boolean.FALSE, then a temporal index is not used.
*
*
* Statistics configurations are maintained in UserData. Each attribute may have
* a UserData property called 'stats'. The associated value is an instance of
* {@link mil.nga.giat.geowave.adapter.vector.stats.StatsConfigurationCollection}
Expand All @@ -88,7 +88,7 @@
* type of statistic. The default statistics for geometry and temporal
* constraints cannot be changed, as they are critical components to the
* efficiency of query processing.
*
*
*/
@SuppressWarnings("unchecked")
public class FeatureDataAdapter extends
Expand Down

0 comments on commit 6cfc6bd

Please sign in to comment.