Commit

findbugs, formatting
blastarr committed Aug 17, 2017
1 parent 3989365 commit fc2a975
Showing 4 changed files with 58 additions and 106 deletions.
File 1 of 4
@@ -16,8 +16,7 @@

public class KMeansHullGenerator
{
- private final static Logger LOGGER = LoggerFactory.getLogger(
- KMeansHullGenerator.class);
+ private final static Logger LOGGER = LoggerFactory.getLogger(KMeansHullGenerator.class);

public static JavaPairRDD<Integer, Iterable<Vector>> groupByIndex(
final JavaRDD<Vector> inputPoints,
File 2 of 4
@@ -206,31 +206,31 @@ public static DataAdapter writeClusterHulls(
for (final Tuple2<Integer, Geometry> hull : hullRdd.collect()) {
Integer index = hull._1;
Geometry geom = hull._2;

sfBuilder.set(
Geometry.class.getName(),
geom);

int count = 0;

for (Iterable<Vector> points : groupByRdd.lookup(index)) {
Vector[] pointVec = Iterables.toArray(
points,
Vector.class);
count += pointVec.length;
}

sfBuilder.set(
"Count",
count);

sfBuilder.set(
"Area",
geom.getArea());

sfBuilder.set(
"Density",
- (double)count / geom.getArea());
+ (double) count / geom.getArea());

final SimpleFeature sf = sfBuilder.buildFeature("Hull-" + index);

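Note: the only change in the hunk above is whitespace in the density cast; the surrounding loop already computes each hull's Count, Area, and Density attributes. As a minimal standalone sketch of that attribute math (the JTS import path reflects the library GeoWave bundled at the time and is an assumption, and the holder class itself is hypothetical):

import com.vividsolutions.jts.geom.Geometry;

// Hypothetical holder for the three per-hull attributes the loop above writes
// onto each SimpleFeature: member-point count, hull area, and density.
final class HullAttributes {
    final int count;      // points assigned to this cluster (summed from groupByRdd.lookup)
    final double area;    // area of the hull geometry
    final double density; // points per unit area, i.e. (double) count / area

    HullAttributes(
            final int count,
            final Geometry hull ) {
        this.count = count;
        this.area = hull.getArea();
        this.density = (double) count / hull.getArea();
    }
}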
File 3 of 4
@@ -40,8 +40,7 @@ public class KmeansSparkCommand extends
DefaultOperation implements
Command
{
- private final static Logger LOGGER = LoggerFactory.getLogger(
- KmeansSparkCommand.class);
+ private final static Logger LOGGER = LoggerFactory.getLogger(KmeansSparkCommand.class);

@Parameter(description = "<input storename> <output storename>")
private List<String> parameters = new ArrayList<String>();
@@ -65,10 +64,8 @@ public void execute(
"Requires arguments: <input storename> <output storename>");
}

- final String inputStoreName = parameters.get(
- 0);
- final String outputStoreName = parameters.get(
- 1);
+ final String inputStoreName = parameters.get(0);
+ final String outputStoreName = parameters.get(1);

// Config file
final File configFile = (File) params.getContext().get(
@@ -78,8 +75,7 @@
if (inputDataStore == null) {
final StoreLoader inputStoreLoader = new StoreLoader(
inputStoreName);
- if (!inputStoreLoader.loadFromConfig(
- configFile)) {
+ if (!inputStoreLoader.loadFromConfig(configFile)) {
throw new ParameterException(
"Cannot find input store: " + inputStoreLoader.getStoreName());
}
@@ -89,8 +85,7 @@
if (outputDataStore == null) {
final StoreLoader outputStoreLoader = new StoreLoader(
outputStoreName);
- if (!outputStoreLoader.loadFromConfig(
- configFile)) {
+ if (!outputStoreLoader.loadFromConfig(configFile)) {
throw new ParameterException(
"Cannot find output store: " + outputStoreLoader.getStoreName());
}
@@ -108,23 +103,17 @@
// Convert properties from DBScanOptions and CommonOptions
final PropertyManagementConverter converter = new PropertyManagementConverter(
properties);
- converter.readProperties(
- kMeansSparkOptions);
+ converter.readProperties(kMeansSparkOptions);

final KMeansRunner runner = new KMeansRunner();
- runner.setAppName(
- kMeansSparkOptions.getAppName());
- runner.setMaster(
- kMeansSparkOptions.getMaster());
+ runner.setAppName(kMeansSparkOptions.getAppName());
+ runner.setMaster(kMeansSparkOptions.getMaster());
runner.setSplits(
kMeansSparkOptions.getMinSplits(),
kMeansSparkOptions.getMaxSplits());
- runner.setInputDataStore(
- inputDataStore);
- runner.setNumClusters(
- kMeansSparkOptions.getNumClusters());
- runner.setNumIterations(
- kMeansSparkOptions.getNumIterations());
+ runner.setInputDataStore(inputDataStore);
+ runner.setNumClusters(kMeansSparkOptions.getNumClusters());
+ runner.setNumIterations(kMeansSparkOptions.getNumIterations());

ScaledTemporalRange scaledRange = null;

@@ -141,26 +130,22 @@
adapterId);

if (scaledRange == null) {
- LOGGER.error(
- "Failed to set time params for kmeans. Please specify a valid feature type.");
+ LOGGER.error("Failed to set time params for kmeans. Please specify a valid feature type.");
throw new ParameterException(
"--useTime option: Failed to set time params");
}
}

if (kMeansSparkOptions.getEpsilon() != null) {
- runner.setEpsilon(
- kMeansSparkOptions.getEpsilon());
+ runner.setEpsilon(kMeansSparkOptions.getEpsilon());
}

if (kMeansSparkOptions.getAdapterId() != null) {
- runner.setAdapterId(
- kMeansSparkOptions.getAdapterId());
+ runner.setAdapterId(kMeansSparkOptions.getAdapterId());
}

if (kMeansSparkOptions.getCqlFilter() != null) {
- runner.setCqlFilter(
- kMeansSparkOptions.getCqlFilter());
+ runner.setCqlFilter(kMeansSparkOptions.getCqlFilter());
}

stopwatch.reset();
@@ -176,8 +161,7 @@
}

stopwatch.stop();
- LOGGER.debug(
- "KMeans runner took " + stopwatch.getTimeString());
+ LOGGER.debug("KMeans runner took " + stopwatch.getTimeString());

final KMeansModel clusterModel = runner.getOutputModel();

@@ -200,8 +184,7 @@
kMeansSparkOptions.getHullTypeName());

stopwatch.stop();
- LOGGER.debug(
- "KMeans hull generation took " + stopwatch.getTimeString());
+ LOGGER.debug("KMeans hull generation took " + stopwatch.getTimeString());
}
}

@@ -212,8 +195,7 @@ public List<String> getParameters() {
public void setParameters(
final String storeName ) {
parameters = new ArrayList<String>();
- parameters.add(
- storeName);
+ parameters.add(storeName);
}

public DataStorePluginOptions getInputStoreOptions() {
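Taken together, the execute() hunks above amount to configuring and running a KMeansRunner. As a condensed sketch of that flow, using only the setters visible in this diff (kMeansSparkOptions and inputDataStore stand in for the command's fields; the call that actually launches the job sits in a collapsed part of the diff, so it is only indicated by a comment here):

final KMeansRunner runner = new KMeansRunner();
runner.setAppName(kMeansSparkOptions.getAppName());
runner.setMaster(kMeansSparkOptions.getMaster());
runner.setSplits(
        kMeansSparkOptions.getMinSplits(),
        kMeansSparkOptions.getMaxSplits());
runner.setInputDataStore(inputDataStore);
runner.setNumClusters(kMeansSparkOptions.getNumClusters());
runner.setNumIterations(kMeansSparkOptions.getNumIterations());

// Optional settings are applied only when the corresponding option was supplied.
if (kMeansSparkOptions.getEpsilon() != null) {
    runner.setEpsilon(kMeansSparkOptions.getEpsilon());
}
if (kMeansSparkOptions.getAdapterId() != null) {
    runner.setAdapterId(kMeansSparkOptions.getAdapterId());
}
if (kMeansSparkOptions.getCqlFilter() != null) {
    runner.setCqlFilter(kMeansSparkOptions.getCqlFilter());
}

// ... run the Spark job (that call is collapsed out of this diff) ...
final KMeansModel clusterModel = runner.getOutputModel();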
File 4 of 4
@@ -51,8 +51,7 @@
@RunWith(GeoWaveITRunner.class)
public class GeoWaveJavaSparkKMeansIT
{
- private final static Logger LOGGER = LoggerFactory.getLogger(
- GeoWaveJavaSparkKMeansIT.class);
+ private final static Logger LOGGER = LoggerFactory.getLogger(GeoWaveJavaSparkKMeansIT.class);

protected static final String HAIL_TEST_CASE_PACKAGE = TestUtils.TEST_CASE_BASE + "hail_test_case/";
protected static final String HAIL_SHAPEFILE_FILE = HAIL_TEST_CASE_PACKAGE + "hail.shp";
@@ -69,38 +68,28 @@ public class GeoWaveJavaSparkKMeansIT
@BeforeClass
public static void reportTestStart() {
startMillis = System.currentTimeMillis();
- LOGGER.warn(
- "-----------------------------------------");
- LOGGER.warn(
- "* *");
- LOGGER.warn(
- "* RUNNING GeoWaveJavaSparkKMeansIT *");
- LOGGER.warn(
- "* *");
- LOGGER.warn(
- "-----------------------------------------");
+ LOGGER.warn("-----------------------------------------");
+ LOGGER.warn("* *");
+ LOGGER.warn("* RUNNING GeoWaveJavaSparkKMeansIT *");
+ LOGGER.warn("* *");
+ LOGGER.warn("-----------------------------------------");
}

@AfterClass
public static void reportTestFinish() {
- LOGGER.warn(
- "-----------------------------------------");
- LOGGER.warn(
- "* *");
- LOGGER.warn(
- "* FINISHED GeoWaveJavaSparkKMeansIT *");
- LOGGER.warn(
- "* " + ((System.currentTimeMillis() - startMillis) / 1000) + "s elapsed. *");
- LOGGER.warn(
- "* *");
- LOGGER.warn(
- "-----------------------------------------");
+ LOGGER.warn("-----------------------------------------");
+ LOGGER.warn("* *");
+ LOGGER.warn("* FINISHED GeoWaveJavaSparkKMeansIT *");
+ LOGGER
+ .warn("* " + ((System.currentTimeMillis() - startMillis) / 1000)
+ + "s elapsed. *");
+ LOGGER.warn("* *");
+ LOGGER.warn("-----------------------------------------");
}

@Test
public void testKMeansRunner() {
- TestUtils.deleteAll(
- inputDataStore);
+ TestUtils.deleteAll(inputDataStore);

// Load data
TestUtils.testLocalIngest(
@@ -113,12 +102,9 @@ public void testKMeansRunner() {

// Create the runner
final KMeansRunner runner = new KMeansRunner();
- runner.setInputDataStore(
- inputDataStore);
- runner.setAdapterId(
- adapterId);
- runner.setCqlFilter(
- CQL_FILTER);
+ runner.setInputDataStore(inputDataStore);
+ runner.setAdapterId(adapterId);
+ runner.setCqlFilter(CQL_FILTER);

// Attempt to set the time params
ScaledTemporalRange scaledRange = KMeansUtils.setRunnerTimeParams(
@@ -128,11 +114,9 @@
adapterId));

if (scaledRange == null) {
- Assert.fail(
- "Failed to set time params");
+ Assert.fail("Failed to set time params");

- TestUtils.deleteAll(
- inputDataStore);
+ TestUtils.deleteAll(inputDataStore);

runner.closeContext();
}
@@ -165,21 +149,17 @@ public void testKMeansRunner() {
final JavaPairRDD<Integer, Iterable<Vector>> groupByRDD = KMeansHullGenerator.groupByIndex(
runner.getInputCentroids(),
clusterModel);
- final JavaPairRDD<Integer, Geometry> hullsRDD = KMeansHullGenerator.generateHullsRDD(
- groupByRDD);
+ final JavaPairRDD<Integer, Geometry> hullsRDD = KMeansHullGenerator.generateHullsRDD(groupByRDD);

Assert.assertTrue(
"centroids from the model should match the hull count",
clusterModel.clusterCenters().length == hullsRDD.count());

- System.out.println(
- "KMeans cluster hulls:");
+ System.out.println("KMeans cluster hulls:");
for (final Tuple2<Integer, Geometry> hull : hullsRDD.collect()) {
- System.out.println(
- "> Hull size (verts): " + hull._2.getNumPoints());
+ System.out.println("> Hull size (verts): " + hull._2.getNumPoints());

- System.out.println(
- "> Hull centroid: " + hull._2.getCentroid().toString());
+ System.out.println("> Hull centroid: " + hull._2.getCentroid().toString());

}

@@ -195,8 +175,7 @@ public void testKMeansRunner() {
hullAdapter,
clusterModel.clusterCenters().length);

- TestUtils.deleteAll(
- inputDataStore);
+ TestUtils.deleteAll(inputDataStore);

runner.closeContext();
}
@@ -221,35 +200,27 @@ private void queryFeatures(

final SimpleFeature isFeat = (SimpleFeature) maybeFeat;

- final Geometry geom = (Geometry) isFeat.getAttribute(
- 0);
+ final Geometry geom = (Geometry) isFeat.getAttribute(0);

count++;
- LOGGER.warn(
- count + ": " + isFeat.getID() + " - " + geom.toString());
+ LOGGER.warn(count + ": " + isFeat.getID() + " - " + geom.toString());

for (AttributeDescriptor attrDesc : isFeat.getFeatureType().getAttributeDescriptors()) {
final Class<?> bindingClass = attrDesc.getType().getBinding();
- if (TimeUtils.isTemporal(
- bindingClass)) {
+ if (TimeUtils.isTemporal(bindingClass)) {
String timeField = attrDesc.getLocalName();
- Date time = (Date) isFeat.getAttribute(
- timeField);
- LOGGER.warn(
- " time = " + time);
+ Date time = (Date) isFeat.getAttribute(timeField);
+ LOGGER.warn(" time = " + time);
}
else {
- LOGGER.warn(
- attrDesc.getLocalName() + " = " + isFeat.getAttribute(
- attrDesc.getLocalName()));
+ LOGGER.warn(attrDesc.getLocalName() + " = " + isFeat.getAttribute(attrDesc.getLocalName()));
}
}

}

- LOGGER.warn(
- "Counted " + count + " features in datastore for " + StringUtils.stringFromBinary(
- dataAdapter.getAdapterId().getBytes()));
+ LOGGER.warn("Counted " + count + " features in datastore for "
+ + StringUtils.stringFromBinary(dataAdapter.getAdapterId().getBytes()));
}
catch (final Exception e) {
e.printStackTrace();
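For reference, the hull check this test performs is easy to lose in the reformatting noise: group the input points by cluster index, build one hull per cluster, then assert that the hull count matches the model's centroid count. Condensed from the calls visible in the hunks above (types and signatures as shown there):

final JavaPairRDD<Integer, Iterable<Vector>> groupByRDD = KMeansHullGenerator.groupByIndex(
        runner.getInputCentroids(),
        clusterModel);
final JavaPairRDD<Integer, Geometry> hullsRDD = KMeansHullGenerator.generateHullsRDD(groupByRDD);

// One hull is generated per cluster, so the counts must agree.
Assert.assertTrue(
        "centroids from the model should match the hull count",
        clusterModel.clusterCenters().length == hullsRDD.count());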
