Skip to content

Comparing changes

Choose two branches to see what’s changed or to start a new pull request. If you need to, you can also compare across forks.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks.
  • 8 commits
  • 18 files changed
  • 0 commit comments
  • 4 contributors
Commits on Nov 20, 2013
@prusso-sse prusso-sse Removed TODOs and blasts of stack traces on exceptions and logged them instead.
349f33d
@prusso-sse prusso-sse Merge branch 'master' of https://github.com/USGS-CIDA/geo-data-portal 06a59ef
Commits on Dec 04, 2013
@dblodgett-usgs dblodgett-usgs Adding sample shapefiles to geoserver, will likely need to get some configuration for them into the overlay as well.
cfac152
Commits on Feb 19, 2014
@dblodgett-usgs dblodgett-usgs Modified BioClim processing script to work with the UofI metdata and put in a check for F or K temperatures.
2d1c994
@jiwalker-usgs Merge pull request #77 from prusso-sse/master
Removed stack traces and logged exceptions as requested.
7042805
@jiwalker-usgs Merge pull request #82 from dblodgett-usgs/master
A couple bug fixes to BioClim. Ready to move up the stack.
cee48f0
@jiwalker-usgs Merge pull request #83 from jiwalker-usgs/master
Getting back into the swing of things with some cleanup
ddea73d
Commits on Feb 21, 2014
@jiwalker-usgs Temporary files for processes are now stored in WorkSpace rather than tmpdir.

* also removed auto-generated licence header
508e8f7
Showing with 61 additions and 46 deletions.
  1. +2 −1 ...ss-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureCategoricalGridCoverageAlgorithm.java
  2. +2 −1 ...ps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureCoverageOPeNDAPIntersectionAlgorithm.java
  3. +2 −1 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureGridStatisticsAlgorithm.java
  4. +2 −1 ...ess-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureWeightedGridStatisticsAlgorithm.java
  5. +0 −4 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/GDPAlgorithmConstants.java
  6. +5 −4 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/PRMSParameterGeneratorAlgorithm.java
  7. +6 −14 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GMLStreamingFeatureCollection.java
  8. +2 −1 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GMLStreamingParser.java
  9. +2 −1 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GeoTIFFParser.java
  10. +2 −1 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/util/WCSUtil.java
  11. +36 −17 gdp-process-wps/src/main/webapp/R/scripts/gridded_bioclim.R
  12. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/Alaska.zip
  13. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/CONUS_states.zip
  14. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/CSC_Boundaries.zip
  15. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/FWS_LCC.zip
  16. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/Level_III_Ecoregions.zip
  17. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/nps_boundary.zip
  18. BIN geoserver/src/main/webapp/data/Shapefiles/Samples/simplified_HUC8s.zip
View
3 ...rc/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureCategoricalGridCoverageAlgorithm.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.algorithm;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.coreprocessing.Delimiter;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.FeatureCategoricalGridCoverage;
import gov.usgs.cida.gdp.wps.binding.CSVFileBinding;
@@ -110,7 +111,7 @@ public void process() {
try {
- output = File.createTempFile(getClass().getSimpleName(), delimiter.extension);
+ output = File.createTempFile(getClass().getSimpleName(), delimiter.extension, new File(AppConstant.WORK_LOCATION.getValue()));
writer = new BufferedWriter(new FileWriter(output));
for (String currentDatasetId : datasetId) {
View
3 ...ain/java/gov/usgs/cida/gdp/wps/algorithm/FeatureCoverageOPeNDAPIntersectionAlgorithm.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.algorithm;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.wps.binding.GMLStreamingFeatureCollectionBinding;
import gov.usgs.cida.gdp.wps.binding.NetCDFFileBinding;
import java.io.File;
@@ -104,7 +105,7 @@ public void process() {
GridDataset gridDataSet = null;
try {
gridDataSet = GDPAlgorithmUtil.generateGridDataSet(datasetURI);
- output = File.createTempFile(getClass().getSimpleName(), ".nc");
+ output = File.createTempFile(getClass().getSimpleName(), ".nc", new File(AppConstant.WORK_LOCATION.getValue()));
NetCDFGridWriter.makeFile(
output.getAbsolutePath(),
gridDataSet,
View
3 ...ess-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureGridStatisticsAlgorithm.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.algorithm;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.coreprocessing.Delimiter;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.FeatureCoverageGridStatistics;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.Statistics1DWriter.GroupBy;
@@ -182,7 +183,7 @@ public void process() {
return;
}
- output = File.createTempFile(getClass().getSimpleName(), delimiter.extension);
+ output = File.createTempFile(getClass().getSimpleName(), delimiter.extension, new File(AppConstant.WORK_LOCATION.getValue()));
writer = new BufferedWriter(new FileWriter(output));
for (String currentDatasetId : datasetId) {
View
3 ...src/main/java/gov/usgs/cida/gdp/wps/algorithm/FeatureWeightedGridStatisticsAlgorithm.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.algorithm;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.coreprocessing.Delimiter;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.FeatureCoverageWeightedGridStatistics;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.Statistics1DWriter.GroupBy;
@@ -181,7 +182,7 @@ public void process() {
addError("Attribute " + featureAttributeName + " not found in feature collection");
return;
}
- output = File.createTempFile(getClass().getSimpleName(), delimiter.extension);
+ output = File.createTempFile(getClass().getSimpleName(), delimiter.extension, new File(AppConstant.WORK_LOCATION.getValue()));
writer = new BufferedWriter(new FileWriter(output));
for (String currentDatasetId : datasetId) {
View
4 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/GDPAlgorithmConstants.java
@@ -1,7 +1,3 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
package gov.usgs.cida.gdp.wps.algorithm;
/**
View
9 ...ss-wps/src/main/java/gov/usgs/cida/gdp/wps/algorithm/PRMSParameterGeneratorAlgorithm.java
@@ -1,6 +1,7 @@
package gov.usgs.cida.gdp.wps.algorithm;
import com.google.common.base.Joiner;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.coreprocessing.Delimiter;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.FeatureCoverageWeightedGridStatistics;
import gov.usgs.cida.gdp.coreprocessing.analysis.grid.Statistics1DWriter.GroupBy;
@@ -244,8 +245,8 @@ public void process() {
addError("Attribute " + featureAttributeName + " is not Numeric type, unable to properly index HRU");
}
- prmsParamFile = File.createTempFile(getClass().getSimpleName(), ".param");
- prmsDataFile = File.createTempFile(getClass().getSimpleName(), ".data");
+ prmsParamFile = File.createTempFile(getClass().getSimpleName(), ".param", new File(AppConstant.WORK_LOCATION.getValue()));
+ prmsDataFile = File.createTempFile(getClass().getSimpleName(), ".data", new File(AppConstant.WORK_LOCATION.getValue()));
prmsParamWriter = new BufferedWriter(new FileWriter(prmsParamFile));
prmsDataWriter = new BufferedWriter(new FileWriter(prmsDataFile));
@@ -267,7 +268,7 @@ public void process() {
timeEnd);
// TODO: all I/O instances need try/finally cleanup
- csvFileList.add(File.createTempFile(getClass().getSimpleName(), ".temp.csv"));
+ csvFileList.add(File.createTempFile(getClass().getSimpleName(), ".temp.csv", new File(AppConstant.WORK_LOCATION.getValue())));
BufferedWriter csvWriter = null;
try {
csvWriter = new BufferedWriter(new FileWriter(csvFileList.get(inputIndex)));
@@ -320,7 +321,7 @@ public void process() {
FileInputStream prmsParamsInputStream = null;
FileInputStream prmsDataInputStream = null;
try {
- output = File.createTempFile(getClass().getName(), ".zip");
+ output = File.createTempFile(getClass().getName(), ".zip", new File(AppConstant.WORK_LOCATION.getValue()));
zipOutputStream = new ZipOutputStream(
new FileOutputStream(output));
prmsParamsInputStream = new FileInputStream(prmsParamFile);
View
20 ...process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GMLStreamingFeatureCollection.java
@@ -290,8 +290,6 @@ private StreamingFeatureIterator(Filter filter, boolean wrap) throws ParserConfi
this.filter = filter;
this.wrap = wrap;
- LOGGER.debug("StreamingFeatureIterator() : FILENAME [" + file.getName() + "]");
-
fileInputStream = new FileInputStream(file);
bufferedInputStream = new BufferedInputStream(
@@ -314,14 +312,11 @@ public synchronized boolean hasNext() {
try {
findNext();
} catch (XMLStreamException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.hasNext() XMLStreamException: " + e.getMessage());
} catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.hasNext() IOException: " + e.getMessage());
} catch (SAXException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.hasNext() SAXException: " + e.getMessage());
}
}
return next != null;
@@ -347,8 +342,7 @@ public synchronized void close() {
try {
fileInputStream.close();
} catch (IOException e) {
- // do nothing, cleaning up
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.close() IOException: " + e.getMessage());
}
fileInputStream = null;
}
@@ -357,8 +351,7 @@ public synchronized void close() {
try {
bufferedInputStream.close();
} catch (IOException e) {
- // do nothing, cleaning up
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.close() IOException: " + e.getMessage());
}
bufferedInputStream = null;
}
@@ -367,8 +360,7 @@ public synchronized void close() {
try {
parser.close();
} catch (XMLStreamException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ LOGGER.debug("StreamingFeatureIterator.close() XMLStreamException: " + e.getMessage());
}
parser = null;
}
View
3 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GMLStreamingParser.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.parser;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.wps.binding.GMLStreamingFeatureCollectionBinding;
import java.io.File;
import java.io.IOException;
@@ -16,7 +17,7 @@ public GMLStreamingParser() {
@Override
public GMLStreamingFeatureCollectionBinding parse(InputStream input, String mimeType, String schema) {
try {
- File tempFile = File.createTempFile(getClass().getSimpleName(), ".xml");
+ File tempFile = File.createTempFile(getClass().getSimpleName(), ".xml", new File(AppConstant.WORK_LOCATION.getValue()));
FileUtils.copyInputStreamToFile(input, tempFile);
return new GMLStreamingFeatureCollectionBinding(new GMLStreamingFeatureCollection(tempFile));
} catch (IOException e) {
View
3 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/parser/GeoTIFFParser.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.parser;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.wps.binding.GeoTIFFFileBinding;
import gov.usgs.cida.gdp.wps.util.GeoTIFFUtil;
import gov.usgs.cida.gdp.wps.util.MIMEMultipartStream;
@@ -35,7 +36,7 @@ public GeoTIFFFileBinding parse(InputStream inputStream, String mimeType, String
try {
- tempFile = File.createTempFile(getClass().getSimpleName(), ".tmp");
+ tempFile = File.createTempFile(getClass().getSimpleName(), ".tmp", new File(AppConstant.WORK_LOCATION.getValue()));
FileUtils.copyInputStreamToFile(inputStream, tempFile);
ByteBuffer buffer = ByteBuffer.allocate(4 + MIMEMultipartStream.MAX_BOUNDARY_LENGTH);
View
3 gdp-process-wps/src/main/java/gov/usgs/cida/gdp/wps/util/WCSUtil.java
@@ -1,5 +1,6 @@
package gov.usgs.cida.gdp.wps.util;
+import gov.usgs.cida.gdp.constants.AppConstant;
import gov.usgs.cida.gdp.dataaccess.CoverageMetaData;
import java.io.BufferedOutputStream;
import java.io.File;
@@ -309,7 +310,7 @@ public static File generateTIFFFile(URI wcsURI, String wcsIdentifier, Referenced
if (GeoTIFFUtil.isAllowedMimeType(contentType)) {
String contentTransferEncoding = headerMap.get("Content-Transfer-Encoding");
if (contentTransferEncoding != null) {
- tiffFile = File.createTempFile("gdp", ".tiff");
+ tiffFile = File.createTempFile("gdp", ".tiff", new File(AppConstant.WORK_LOCATION.getValue()));
OutputStream tiffOutputStream = new BufferedOutputStream(new FileOutputStream(tiffFile));
mimeMultipartStream.readBodyData(tiffOutputStream, contentTransferEncoding);
tiffOutputStream.close();
View
53 gdp-process-wps/src/main/webapp/R/scripts/gridded_bioclim.R
@@ -146,7 +146,13 @@ dailyToMonthly<-function(daily_data, time, origin, cells)
request_time_bounds<-function(ncdf4_handle, start, end)
{
- time_units<-strsplit(ncdf4_handle$dim$time$units, " ")[[1]]
+ if (!is.null(ncdf4_handle$dim$time$units)) {
+ time_units<-strsplit(ncdf4_handle$dim$time$units, " ")[[1]]
+ time_dim<-ncdf4_handle$dim$time
+ } else if (!is.null(ncdf4_handle$dim$day$units)) {
+ time_units<-strsplit(ncdf4_handle$dim$day$units, " ")[[1]]
+ time_dim<-ncdf4_handle$dim$day
+ } else stop(paste("No time dimension found. Time dimensions called time and day are supported."))
time_step<-time_units[1]
date_origin<-time_units[3]
time_origin<-"00:00:00"
@@ -160,23 +166,30 @@ request_time_bounds<-function(ncdf4_handle, start, end)
t_1 <- julian(strptime(paste(start,'-01-01 12:00',sep=''), '%Y-%m-%d %H:%M'), origin<-strptime(cal_origin, '%Y-%m-%d %H:%M:%S'))
t_2 <- julian(strptime(paste(end, '-01-01 00:00', sep=''), '%Y-%m-%d %H:%M'), origin<-strptime(cal_origin, '%Y-%m-%d %H:%M:%S'))
# Some simple time and bbox validation.
- if (t_1<head(ncdf4_handle$dim$time$vals,1)) stop(paste("Submitted start date,",start, "is before the dataset's start date,",chron(floor(head(ncdf4_handle$dim$time$vals,1)),out.format=c(dates="year-m-day"), origin=chron_origin)))
- if (t_2>tail(ncdf4_handle$dim$time$vals,1)) stop(paste("Submitted end date,",end, "is after the dataset's end date,",chron(floor(tail(ncdf4_handle$dim$time$vals,1)),out.format=c(dates="year-m-day"), origin=chron_origin)))
+ if (t_1<head(time_dim$vals,1)) stop(paste("Submitted start date,",start, "is before the dataset's start date,",chron(floor(head(time_dim$vals,1)),out.format=c(dates="year-m-day"), origin=chron_origin)))
+ if (t_2>tail(time_dim$vals,1)) stop(paste("Submitted end date,",end, "is after the dataset's end date,",chron(floor(tail(time_dim$vals,1)),out.format=c(dates="year-m-day"), origin=chron_origin)))
if (t_1>t_2) stop('Start date must be before end date.')
- t_ind1 <- min(which(abs(ncdf4_handle$dim$time$vals-t_1)==min(abs(ncdf4_handle$dim$time$vals-t_1))))
- t_ind2 <- max(which(abs(ncdf4_handle$dim$time$vals-t_2)==min(abs(ncdf4_handle$dim$time$vals-t_2))))
- time<-dods_data$dim$time$vals[t_ind1:(t_ind2-1)]
+ t_ind1 <- min(which(abs(time_dim$vals-t_1)==min(abs(time_dim$vals-t_1))))
+ t_ind2 <- max(which(abs(time_dim$vals-t_2)==min(abs(time_dim$vals-t_2))))
+ time<-time_dim$vals[t_ind1:(t_ind2-1)]
return(list(t_ind1=t_ind1, t_ind2=t_ind2, time=time, origin=chron_origin))
}
bbox_in <- as.double(read.csv(header=F,colClasses=c("character"),text=bbox_in))
bioclims <- as.double(read.csv(header=F,colClasses=c("character"),text=bioclims))
+if (3 %in% bioclims & !7 %in% bioclims) {
+ bioclims<-append(bioclims,7)
+ pop_seven<-TRUE
+} else {pop_seven<-FALSE}
# Define Inputs (will come from external call)
tryCatch(dods_data <- nc_open(OPeNDAP_URI), error = function(e)
{
cat("An error was encountered trying to open the OPeNDAP resource."); print(e)
})
variables<-as.character(sapply(dods_data$var,function(x) x$name))
if (!tmax_var %in% variables) stop(paste("The given tmax variable wasn't found in the OPeNDAP dataset"))
+t_unit_multiplier<-function(t) {t}
+if (grepl('k',ncatt_get(dods_data, tmax_var,'units')$value, ignore.case = TRUE)) {t_unit_multiplier <- function(t) {t-273} }
+if (grepl('f',ncatt_get(dods_data, tmax_var,'units')$value, ignore.case = TRUE)) {t_unit_multiplier <- function(t) {(t-32)*(5/9)} }
if (!tmin_var %in% variables) stop(paste("The given tmin variable wasn't found in the OPeNDAP dataset"))
if (!prcp_var %in% variables) stop(paste("The given prcp variable wasn't found in the OPeNDAP dataset"))
if (tave_var!="NULL") if (!tmax_var %in% variables) stop(paste("The given tave variable wasn't found in the OPeNDAP dataset"))
@@ -213,10 +226,10 @@ for (year in as.numeric(start):(as.numeric(end)))
time<-request_time_indices$time
origin<-request_time_indices$origin
# !!! Make sure this is robust for network failures. !!!
- tmax_data <- ncvar_get(dods_data, tmax_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1)))
- tmin_data <- ncvar_get(dods_data, tmin_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1)))
+ tmax_data <- t_unit_multiplier(ncvar_get(dods_data, tmax_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1))))
+ tmin_data <- t_unit_multiplier(ncvar_get(dods_data, tmin_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1))))
prcp_data <- ncvar_get(dods_data, prcp_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1)))
- if (tave_var!="NULL") tave_data <- ncvar_get(dods_data, tave_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1))) else tave_data <- (tmax_data+tmin_data)/2
+ if (tave_var!="NULL") tave_data <- t_unit_multiplier(ncvar_get(dods_data, tave_var, c(min(x1,x2),min(y1,y2),t_ind1),c((abs(x1-x2)+1),(abs(y1-y2)+1),(t_ind2-t_ind1)))) else tave_data <- (tmax_data+tmin_data)/2
cells<-nrow(tmax_data)*ncol(tmax_data)
tmax_data <- matrix(tmax_data,t_ind2-t_ind1,cells,byrow = TRUE)
tmin_data <- matrix(tmin_data,t_ind2-t_ind1,cells,byrow = TRUE)
@@ -248,15 +261,21 @@ for (year in as.numeric(start):(as.numeric(end)))
tmin_data<-tmin_data[mask,]
prcp_data<-prcp_data[mask,]
tave_data<-tave_data[mask,]
- bioclim<-data.frame(bioclim(tmin=tmin_data, tmax=tmax_data, prec=prcp_data, tmean=tave_data, bioclims))
- colnames(bioclim)<-paste('bioclim_',bioclims, sep='')
- for (bclim in names(bioclim))
+ bioclim_out<-data.frame(bioclim(tmin=tmin_data, tmax=tmax_data, prec=prcp_data, tmean=tave_data, bioclims))
+ colnames(bioclim_out)<-paste('bioclim_',bioclims, sep='')
+ for (bclim in names(bioclim_out))
{
- data_to_write <- SpatialPixelsDataFrame(SpatialPoints(coords, proj4string = CRS(prj)), bioclim[bclim], tolerance=0.0001)
- file_name<-paste(bclim,'_',as.character(year),'.tif',sep='')
- fileNames[fileStep]<-file_name
- fileStep<-fileStep+1
- writeGDAL(data_to_write,file_name)
+ if (pop_seven==TRUE & bclim=='bioclim_7') {
+ paste('Passed BioClim 7')
+ } else
+ {
+ data_to_write <- SpatialPixelsDataFrame(SpatialPoints(coords, proj4string = CRS(prj)), bioclim_out[bclim], tolerance=0.0001)
+ file_name<-paste(bclim,'_',as.character(year),'.tif',sep='')
+ fileNames[fileStep]<-file_name
+ fileStep<-fileStep+1
+ writeGDAL(data_to_write,file_name)
+ }
+
}
}
name<-'bioclim.zip'
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/Alaska.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/CONUS_states.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/CSC_Boundaries.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/FWS_LCC.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/Level_III_Ecoregions.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/nps_boundary.zip
Binary file not shown.
View
BIN geoserver/src/main/webapp/data/Shapefiles/Samples/simplified_HUC8s.zip
Binary file not shown.

No commit comments for this range

Something went wrong with that request. Please try again.