Skip to content

Commit

Permalink
Generate Largest-Possible Tiles
Browse files Browse the repository at this point in the history
  • Loading branch information
James McClain authored and echeipesh committed Oct 17, 2017
1 parent c67c787 commit 81052f5
Show file tree
Hide file tree
Showing 3 changed files with 31 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ class S3GeoTiffRDDSpec
val geoTiffBytes = Files.readAllBytes(Paths.get(testGeoTiffPath))
mockClient.putObject(bucket, key, geoTiffBytes)

val options = S3GeoTiffRDD.Options(getS3Client = () => new MockS3Client, partitionBytes=1<<20)
val options = S3GeoTiffRDD.Options(getS3Client = () => new MockS3Client, partitionBytes=1<<20, maxTileSize = Some(64))
val geometry = Line(Point(141.7066667, -17.5200000), Point(142.1333333, -17.7))
val fn = {( _: Any, key: ProjectedExtent) => key }
val source1 =
Expand Down
34 changes: 29 additions & 5 deletions spark/src/main/scala/geotrellis/spark/io/RasterReader.scala
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,20 @@ object RasterReader {
cols: Int, rows: Int, maxSize: Int,
segCols: Int, segRows: Int
): Array[GridBounds] = {
val colSize: Int = if (maxSize >= segCols) segCols; else best(maxSize, segCols)
val rowSize: Int = if (maxSize >= segRows) segRows; else best(maxSize, segRows)
val colSize: Int =
if (maxSize >= segCols * 2) {
math.floor(maxSize.toDouble / segCols).toInt * segCols
} else if (maxSize >= segCols) {
segCols
} else best(maxSize, segCols)

val rowSize: Int =
if (maxSize >= segRows * 2) {
math.floor(maxSize.toDouble / segRows).toInt * segRows
} else if (maxSize >= segRows) {
segRows
} else best(maxSize, segRows)

val windows = listWindows(cols, rows, colSize, rowSize)

windows
Expand All @@ -90,10 +102,22 @@ object RasterReader {
extent: Extent, segCols: Int, segRows: Int, geometry: Geometry,
options: Rasterizer.Options = Rasterizer.Options.DEFAULT
): Array[GridBounds] = {
val maxColSize: Int =
if (maxSize >= segCols * 2) {
math.floor(maxSize.toDouble / segCols).toInt * segCols
} else if (maxSize >= segCols) {
segCols
} else best(maxSize, segCols)

val maxRowSize: Int =
if (maxSize >= segRows * 2) {
math.floor(maxSize.toDouble / segRows).toInt * segRows
} else if (maxSize >= segRows) {
segRows
} else best(maxSize, segRows)

val result = scala.collection.mutable.ArrayBuffer[GridBounds]()
val maxColSize: Int = if (maxSize >= segCols) segCols; else best(maxSize, segCols)
val maxRowSize: Int = if (maxSize >= segRows) segRows; else best(maxSize, segRows)
val re = RasterExtent(extent, cols/maxColSize, rows/maxRowSize)
val re = RasterExtent(extent, math.max(cols/maxColSize,1), math.max(rows/maxRowSize,1))

Rasterizer.foreachCellByGeometry(geometry, re, options)({ (col: Int, row: Int) =>
result +=
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ class HadoopGeoTiffRDDSpec

it("should filter by geometry") {
val testGeoTiffPath = new Path(localFS.getWorkingDirectory, "spark/src/test/resources/all-ones.tif")
val options = HadoopGeoTiffRDD.Options(partitionBytes=Some(1<<20))
val options = HadoopGeoTiffRDD.Options(partitionBytes=Some(1<<20), maxTileSize = Some(64))
val geometry = Line(Point(141.7066667, -17.5200000), Point(142.1333333, -17.7))
val fn = {( _: URI, key: ProjectedExtent) => key }
val source1 =
Expand Down

0 comments on commit 81052f5

Please sign in to comment.