Add clipToGrid functionality
lossyrob authored and echeipesh committed Oct 20, 2017
1 parent 9881296 commit 1d937db
Showing 6 changed files with 235 additions and 54 deletions.
111 changes: 111 additions & 0 deletions spark/src/main/scala/geotrellis/spark/clip/ClipToGrid.scala
@@ -0,0 +1,111 @@
/*
* Copyright 2017 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis.spark.clip

import geotrellis.raster.GridBounds
import geotrellis.spark._
import geotrellis.spark.tiling._
import geotrellis.util._
import geotrellis.vector._

import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory
import org.apache.spark.rdd._

object ClipToGrid {
  def apply[G <: Geometry, D](
    rdd: RDD[Feature[G, D]],
    layout: LayoutDefinition
  ): RDD[(SpatialKey, Feature[Geometry, D])] =
    apply[G, D, D](rdd, layout, { (_, d) => Some(d) })

  def apply[G <: Geometry, D1, D2](
    rdd: RDD[Feature[G, D1]],
    layout: LayoutDefinition,
    clipData: (Geometry, D1) => Option[D2]
  ): RDD[(SpatialKey, Feature[Geometry, D2])] = {
    val mapTransform: MapKeyTransform = layout.mapTransform

    rdd.flatMap { f => clipGeom(mapTransform, clipData, f) }
  }

  private def clipGeom[G <: Geometry, D1, D2](
    mapTransform: MapKeyTransform,
    clipData: (Geometry, D1) => Option[D2],
    feature: Feature[G, D1]
  ): Iterator[(SpatialKey, Feature[Geometry, D2])] = {

    def clipFeature(k: SpatialKey, contains: (Extent, G) => Boolean): Option[(SpatialKey, Feature[Geometry, D2])] = {
      val extent = mapTransform(k)

      val clipped: Option[Geometry] =
        feature.geom match {
          case p: Point =>
            Some(p)
          case g =>
            g.intersection(extent).toGeometry
        }

      for(g <- clipped; d <- clipData(g, feature.data)) yield {
        (k, Feature(g, d))
      }
    }

    val iterator: Iterator[(SpatialKey, Feature[Geometry, D2])] =
      feature.geom match {
        case p: Point =>
          val k = mapTransform(p)
          Iterator(clipData(p, feature.data).map { d => (k, Feature(p, d)) }).flatten
        case mp: MultiPoint =>
          mp.points
            .map(mapTransform(_))
            .distinct
            .map(clipFeature(_, { (x, y) => true }))
            .flatten
            .iterator
        case l: Line =>
          mapTransform.multiLineToKeys(MultiLine(l)).map(clipFeature(_, { (x,y) => false })).flatten
        case ml: MultiLine =>
          mapTransform.multiLineToKeys(ml).map(clipFeature(_, { (x,y) => false })).flatten
        case p: Polygon =>
          val pg = PreparedGeometryFactory.prepare(p.jtsGeom)
          val contains = { (extent: Extent, geom: G) =>
            pg.contains(extent.toPolygon.jtsGeom)
          }

          mapTransform.multiPolygonToKeys(MultiPolygon(p)).map(clipFeature(_, contains)).flatten
        case mp: MultiPolygon =>
          val pg = PreparedGeometryFactory.prepare(mp.jtsGeom)
          val contains = { (extent: Extent, geom: G) =>
            pg.contains(extent.toPolygon.jtsGeom)
          }

          mapTransform.multiPolygonToKeys(mp).map(clipFeature(_, contains)).flatten
        case gc: GeometryCollection =>
          val b: GridBounds = mapTransform(gc.envelope)

          val keys: Iterator[SpatialKey] = for {
            x <- Iterator.range(b.colMin, b.colMax+1)
            y <- Iterator.range(b.rowMin, b.rowMax+1)
          } yield SpatialKey(x, y)

          keys.filter(k => mapTransform(k).toPolygon.intersects(gc))
            .map(k => clipFeature(k, { (x,y) => false })).flatten
      }

    iterator
  }
}
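
For reference, a minimal usage sketch of ClipToGrid (not part of this commit; the RDD, data types, and names below are illustrative):

// Usage sketch: clip an RDD of polygon features to a layout grid,
// keying each clipped piece by SpatialKey.
import geotrellis.spark._
import geotrellis.spark.clip.ClipToGrid
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.vector._
import org.apache.spark.rdd.RDD

def clipParcels(
  parcels: RDD[Feature[Polygon, Int]],   // hypothetical input features
  layout: LayoutDefinition               // hypothetical target layout
): RDD[(SpatialKey, Feature[Geometry, Int])] =
  ClipToGrid(parcels, layout)            // feature data carried through unchanged

// Variant with a clipData function that transforms or drops data per clipped
// geometry: here the data becomes the clipped area, and degenerate
// intersections (points, lines) are discarded.
def clippedAreas(
  parcels: RDD[Feature[Polygon, Int]],
  layout: LayoutDefinition
): RDD[(SpatialKey, Feature[Geometry, Double])] =
  ClipToGrid(parcels, layout, { (g: Geometry, _: Int) =>
    g match {
      case p: Polygon       => Some(p.jtsGeom.getArea)
      case mp: MultiPolygon => Some(mp.jtsGeom.getArea)
      case _                => None
    }
  })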
32 changes: 32 additions & 0 deletions spark/src/main/scala/geotrellis/spark/clip/ClipToGridMethods.scala
@@ -0,0 +1,32 @@
/*
* Copyright 2017 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis.spark.clip

import geotrellis.spark._
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.util.MethodExtensions
import geotrellis.vector._

import org.apache.spark.rdd._

trait ClipToGridMethods[G <: Geometry, D] extends MethodExtensions[RDD[Feature[G, D]]] {
  def clipToGrid(layout: LayoutDefinition): RDD[(SpatialKey, Feature[Geometry, D])] =
    ClipToGrid(self, layout)

  def clipToGrid[D2](layout: LayoutDefinition, clipData: (Geometry, D) => Option[D2]): RDD[(SpatialKey, Feature[Geometry, D2])] =
    ClipToGrid(self, layout, clipData)
}
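
Once these extension methods are in scope (the implicits are wired into the geotrellis.spark package object later in this commit), the operation reads as a method on the feature RDD. A small sketch with illustrative names:

// Extension-method form (illustrative, not part of the commit).
import geotrellis.spark._
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.vector._
import org.apache.spark.rdd.RDD

def keyedRoads(
  roads: RDD[Feature[Line, String]],  // hypothetical line features
  layout: LayoutDefinition
): RDD[(SpatialKey, Feature[Geometry, String])] =
  roads.clipToGrid(layout)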
28 changes: 28 additions & 0 deletions spark/src/main/scala/geotrellis/spark/clip/Implicits.scala
@@ -0,0 +1,28 @@
/*
* Copyright 2017 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis.spark.clip

import geotrellis.vector._

import org.apache.spark.rdd._

trait Implicits {
  implicit class withClipToGridMethods[G <: Geometry, D](val self: RDD[Feature[G, D]])
      extends ClipToGridMethods[G, D]
}

object Implicits extends Implicits
62 changes: 8 additions & 54 deletions spark/src/main/scala/geotrellis/spark/io/LayerFilter.scala
@@ -134,34 +134,11 @@ object Intersects {
     new LayerFilter[K, Intersects.type, MultiPolygon, M] {
       def apply(metadata: M, kb: KeyBounds[K], polygon: MultiPolygon) = {
         val mapTransform = metadata.getComponent[LayoutDefinition].mapTransform
-        val extent: Extent = polygon.envelope
-        val keyext: Extent = mapTransform.keyToExtent(kb.minKey.getComponent[SpatialKey])
-        val bounds: GridBounds = mapTransform.extentToBounds(extent)
-        val options = Options(includePartial=true, sampleType=PixelIsArea)
-
-        val boundsExtent: Extent = mapTransform(bounds)
-        val rasterExtent = RasterExtent(boundsExtent, bounds.width, bounds.height)
-
-        /*
-         * Use the Rasterizer to construct a list of tiles which meet
-         * the query polygon. That list of tiles is stored as an
-         * array of tuples which is then mapped-over to produce an
-         * array of KeyBounds.
-         */
-        val tiles = new ConcurrentHashMap[(Int,Int), Unit]
-        val fn = new Callback {
-          def apply(col : Int, row : Int): Unit = {
-            val tile : (Int, Int) = (bounds.colMin + col, bounds.rowMin + row)
-            tiles.put(tile, Unit)
-          }
-        }
-
-        polygon.foreach(rasterExtent, options)(fn)
-        tiles.keys.asScala
-          .map({ tile =>
+        mapTransform.multiPolygonToKeys(polygon)
+          .map({ key =>
             val qb = KeyBounds(
-              kb.minKey setComponent SpatialKey(tile._1, tile._2),
-              kb.maxKey setComponent SpatialKey(tile._1, tile._2))
+              kb.minKey setComponent key,
+              kb.maxKey setComponent key)
             qb intersect kb match {
               case kb: KeyBounds[K] => List(kb)
               case EmptyBounds => Nil
@@ -215,34 +192,11 @@ object Intersects {
     new LayerFilter[K, Intersects.type, MultiLine, M] {
       def apply(metadata: M, kb: KeyBounds[K], multiLine: MultiLine) = {
         val mapTransform = metadata.getComponent[LayoutDefinition].mapTransform
-        val extent: Extent = multiLine.envelope
-        val keyext: Extent = mapTransform.keyToExtent(kb.minKey.getComponent[SpatialKey])
-        val bounds: GridBounds = mapTransform(extent)
-        val options = Options(includePartial=true, sampleType=PixelIsArea)
-
-        val boundsExtent: Extent = mapTransform(bounds)
-        val rasterExtent = RasterExtent(boundsExtent, bounds.width, bounds.height)
-
-        /*
-         * Use the Rasterizer to construct a list of tiles which meet
-         * the query polygon. That list of tiles is stored as an
-         * array of tuples which is then mapped-over to produce an
-         * array of KeyBounds.
-         */
-        val tiles = new ConcurrentHashMap[(Int,Int), Unit]
-        val fn = new Callback {
-          def apply(col : Int, row : Int): Unit = {
-            val tile : (Int, Int) = (bounds.colMin + col, bounds.rowMin + row)
-            tiles.put(tile, Unit)
-          }
-        }
-
-        multiLine.foreach(rasterExtent, options)(fn)
-        tiles.keys.asScala
-          .map({ tile =>
+        mapTransform.multiLineToKeys(multiLine)
+          .map({ key =>
             val qb = KeyBounds(
-              kb.minKey setComponent SpatialKey(tile._1, tile._2),
-              kb.maxKey setComponent SpatialKey(tile._1, tile._2))
+              kb.minKey setComponent key,
+              kb.maxKey setComponent key)
             qb intersect kb match {
               case kb: KeyBounds[K] => List(kb)
               case EmptyBounds => Nil
1 change: 1 addition & 0 deletions spark/src/main/scala/geotrellis/spark/package.scala
@@ -35,6 +35,7 @@ import java.time.Instant
 
 package object spark
   extends buffer.Implicits
+  with clip.Implicits
   with costdistance.Implicits
   with crop.Implicits
   with density.Implicits
55 changes: 55 additions & 0 deletions spark/src/main/scala/geotrellis/spark/tiling/MapKeyTransform.scala
@@ -18,10 +18,14 @@ package geotrellis.spark.tiling
 
 import geotrellis.spark._
 import geotrellis.raster._
+import geotrellis.raster.rasterize.{Rasterizer, Callback}
 import geotrellis.vector._
 import geotrellis.proj4._
 import geotrellis.util._
 
+import java.util.concurrent.ConcurrentHashMap
+import scala.collection.JavaConverters._
+
 object MapKeyTransform {
   def apply(crs: CRS, level: LayoutLevel): MapKeyTransform =
     apply(crs.worldExtent, level.layout.layoutCols, level.layout.layoutRows)
@@ -122,4 +126,55 @@ class MapKeyTransform(val extent: Extent, val layoutCols: Int, val layoutRows: Int)
       extent.xmin + (col + 1) * tileWidth,
       extent.ymax - row * tileHeight
     )
+
+  def multiLineToKeys(multiLine: MultiLine): Iterator[SpatialKey] = {
+    val extent = multiLine.envelope
+    val bounds: GridBounds = extentToBounds(extent)
+    val options = Rasterizer.Options(includePartial=true, sampleType=PixelIsArea)
+
+    val boundsExtent: Extent = boundsToExtent(bounds)
+    val rasterExtent = RasterExtent(boundsExtent, bounds.width, bounds.height)
+
+    /*
+     * Use the Rasterizer to construct a list of tiles which meet
+     * the query polygon. That list of tiles is stored as an
+     * array of tuples which is then mapped-over to produce an
+     * array of KeyBounds.
+     */
+    val tiles = new ConcurrentHashMap[(Int,Int), Unit]
+    val fn = new Callback {
+      def apply(col : Int, row : Int): Unit = {
+        val tile : (Int, Int) = (bounds.colMin + col, bounds.rowMin + row)
+        tiles.put(tile, Unit)
+      }
+    }
+
+    multiLine.foreach(rasterExtent, options)(fn)
+    tiles.keys.asScala.map { case (col, row) => SpatialKey(col, row) }
+  }
+
+  def multiPolygonToKeys(multiPolygon: MultiPolygon): Iterator[SpatialKey] = {
+    val extent = multiPolygon.envelope
+    val bounds: GridBounds = extentToBounds(extent)
+    val options = Rasterizer.Options(includePartial=true, sampleType=PixelIsArea)
+    val boundsExtent: Extent = boundsToExtent(bounds)
+    val rasterExtent = RasterExtent(boundsExtent, bounds.width, bounds.height)
+
+    /*
+     * Use the Rasterizer to construct a list of tiles which meet
+     * the query polygon. That list of tiles is stored as an
+     * array of tuples which is then mapped-over to produce an
+     * array of KeyBounds.
+     */
+    val tiles = new ConcurrentHashMap[(Int,Int), Unit]
+    val fn = new Callback {
+      def apply(col : Int, row : Int): Unit = {
+        val tile : (Int, Int) = (bounds.colMin + col, bounds.rowMin + row)
+        tiles.put(tile, Unit)
+      }
+    }
+
+    multiPolygon.foreach(rasterExtent, options)(fn)
+    tiles.keys.asScala.map { case (col, row) => SpatialKey(col, row) }
+  }
 }
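
The new multiLineToKeys and multiPolygonToKeys helpers can also be used directly to enumerate the SpatialKeys whose tiles a geometry touches. A brief sketch with illustrative names:

// Sketch (not part of the commit): list the keys a footprint covers.
import geotrellis.spark._
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.vector._

def keysCovering(layout: LayoutDefinition, footprint: MultiPolygon): Seq[SpatialKey] =
  // multiPolygonToKeys rasterizes the footprint over the layout grid and
  // returns an iterator of the touched keys.
  layout.mapTransform.multiPolygonToKeys(footprint).toSeq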
