Skip to content

Commit

Permalink
Merge pull request #286 from jread-usgs/post_GML
Browse files Browse the repository at this point in the history
Post gml
  • Loading branch information
Jordan S Read committed Jun 10, 2016
2 parents 3f69fac + 7110556 commit b9f9d64
Show file tree
Hide file tree
Showing 7 changed files with 43 additions and 12 deletions.
8 changes: 5 additions & 3 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
Package: geoknife
Type: Package
Title: Web-Processing of Large Gridded Datasets
Version: 1.3.2
Date: 2016-06-08
Version: 1.3.3
Date: 2016-06-09
Authors@R: c( person("Jordan", "Read", role = c("aut","cre"),
email = "jread@usgs.gov"),
person("Jordan", "Walker", role = c("aut"),
Expand All @@ -14,7 +14,9 @@ Authors@R: c( person("Jordan", "Read", role = c("aut","cre"),
person("Emily", "Read", role = c("aut"),
email = "eread@usgs.gov"),
person("Luke", "Winslow", role = c("aut"),
email = "lwinslow@usgs.gov"))
email = "lwinslow@usgs.gov"),
person("Lindsay", "Carr", role = c("aut"),
email = "lcarr@usgs.gov"))
Description: Processes gridded datasets found on the U.S. Geological Survey
Geo Data Portal web application or elsewhere, using a web-enabled workflow
that eliminates the need to download and store large datasets that are reliably
Expand Down
4 changes: 4 additions & 0 deletions R/geoknife-generic.R
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@
#'Several quick start methods for creating a \code{\link{webdata}} object (only \code{\link{webdata}} or
#'a type that can be coerced into \code{\link{webdata}} are valid arguments for \code{fabric}).
#'
#'Making concurrent requests to the Geo Data Portal will NOT result in faster overall execution times.
#'The data backing the system is on high performance storage, but that storage is not meant to support
#'parallelized random access and can be significantly slower under these conditions. Read more:
#'https://my.usgs.gov/confluence/display/GeoDataPortal/Geo+Data+Portal+Scalability+Guidelines
#'@docType methods
#'@aliases
#'geoknife
Expand Down
1 change: 1 addition & 0 deletions R/parseTimeseries.R
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ parseTimeseriesConfig = function(file, delim){
nrows = blockEnd - skips - 1 # Number of rows per block.
features = unique(strsplit(fileLines[featureLine], split = delim)[[1]][-1]) # Parsing out feature identifiers
vars = sub(varMarker,"",fileLines[blockStart]) # Getting the variable names from the block starts.
vars = gsub(delim,'',vars)
config = list(vars = vars, features = features, skip = skips, nrows = nrows) # Return all the good stuff!
return(config)
}
Expand Down
2 changes: 1 addition & 1 deletion R/result.R
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ setMethod(f = "result",signature="geojob",
output <- outputParse(.Object, ...)
return(output)
} else {
stop('processing is incomplete or has failed. See checkProcess(). Processing status: ',
stop('processing is incomplete or has failed. See check(). Processing status: ',
check(.Object)$statusType)
}

Expand Down
33 changes: 26 additions & 7 deletions R/values-webgeom.R
Original file line number Diff line number Diff line change
Expand Up @@ -45,20 +45,39 @@ setMethod(f = "values",signature="webgeom",
}
)

#' @title build a WFS GetFeature filter request body
#' @description constructs the XML body for a WFS GetFeature POST request that
#'   filters features by equality of the webgeom's attribute against each of its
#'   values (comparisons are OR'd together).
#' @param .Object a webgeom object supplying the feature type (\code{geom}),
#'   attribute, values, and WFS/GML namespaces
#' @param knife a webprocess object supplying the remaining namespace definitions
#' @param match.case logical; should the \code{ogc:PropertyIsEqualTo} comparison
#'   be case sensitive? Defaults to \code{TRUE}.
#' @return a character string containing the serialized XML request body
#' @keywords internal
wfsFilterFeatureXML <- function(.Object, knife=webprocess(), match.case = TRUE){
  # WFS expects a lowercase string literal ('true'/'false'), not an R logical.
  # scalar condition, so plain if/else rather than ifelse()
  match.case.char <- if (match.case) 'true' else 'false'
  top <- newXMLNode(name='wfs:GetFeature',
                    attrs=c('service'="WFS",'version'= version(.Object),
                            'xsi:schemaLocation' = paste(c(.Object@WFS_NAMESPACE,knife@WPS_SCHEMA_LOCATION),collapse=" ")),
                    namespaceDefinitions=c('ogc' = knife@OGC_NAMESPACE,
                                           'wfs' = .Object@WFS_NAMESPACE,
                                           'xsi' = knife@XSI_NAMESPACE,
                                           'gml' = .Object@GML_NAMESPACE,
                                           'ows' = knife@OWS_NAMESPACE))
  q <- newXMLNode('wfs:Query', parent = top, attrs = c(typeName=geom(.Object)))
  newXMLNode('ogc:PropertyName', parent = q, newXMLTextNode(.Object@attribute))
  f <- newXMLNode('ogc:Filter', parent = q) # skipping namespace
  Or <- newXMLNode('ogc:Or', parent = f)
  # one equality comparison per requested value; the ogc:Or makes any match sufficient
  for (val in values(.Object)){
    p <- newXMLNode('ogc:PropertyIsEqualTo', parent=Or, attrs = c('matchCase'=match.case.char))
    newXMLNode('ogc:PropertyName', parent = p, newXMLTextNode(.Object@attribute))
    newXMLNode('ogc:Literal', parent = p, newXMLTextNode(val))
  }
  # NOTE(review): assumes toString.XMLNode only emits benign namespace warnings — confirm
  return(suppressWarnings(toString.XMLNode(top)))
}

#' @title fetch GML_IDs from WFS
#' @description fetch GML_IDs from WFS when geom, attribute, and values are specified
#' @param .Object a webgeom object
#' @return a character vector of gml id attribute values, one per matching feature
#' @keywords internal
fetchGML_IDs <- function(.Object){
  # POST a filtered GetFeature request; the XML body built by wfsFilterFeatureXML
  # restricts the response to features whose attribute matches values(.Object)
  response <- suppressWarnings(gPOST(url=url(.Object), body=wfsFilterFeatureXML(.Object)))
  xml <- gcontent(response)
  # namespace prefix of the feature type, e.g. "sample" from "sample:CONUS_states"
  ns_geom <- strsplit(geom(.Object), ":")[[1]][1]
  value_path <- sprintf('//gml:featureMembers/%s/%s:%s', geom(.Object), ns_geom, .Object@attribute)
  # select the parent feature node of each matching attribute element
  node_sets <- getNodeSet(xml, paste0(value_path,'/parent::node()'))
  # extract the id attribute from each feature node
  gml_id <- unname(unlist(lapply(node_sets, function(x) xmlAttrs(x)['id'])))
  return(gml_id)
}
5 changes: 5 additions & 0 deletions man/geoknife-methods.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion tests/testthat/test-webdata_object.R
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ test_that("setters work", {
testthat::skip_on_cran()
wd <- webdata('prism',times = as.POSIXct(c('2001-01-01','2002-02-05')))
times(wd)[1] <- as.POSIXct('2000-01-01')
expect_equal(times(wd)[1],as.POSIXct('2000-01-01'))
expect_equal(as.numeric(times(wd)[1]-as.POSIXct('2000-01-01')), 0)
url(wd) <- 'www.badurlppppp.com'
expect_is(url(wd), "character")
})

0 comments on commit b9f9d64

Please sign in to comment.