diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
index f231184d0f..8362cc8876 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
@@ -213,7 +213,6 @@ object S3GeoTiffRDD extends LazyLogging {
 
         RasterReader
           .listWindows(cols, rows, options.maxTileSize.getOrElse(1<<10), layout.tileCols, layout.tileRows)
-          ._3
           .map((objectRequest, _))
       }
 
diff --git a/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala b/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
index ce7b108473..8cb45f6470 100644
--- a/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
@@ -80,15 +80,16 @@ private [geotrellis] trait GeoTiffInfoReader extends LazyLogging {
           case DoubleCellType | DoubleConstantNoDataCellType | DoubleUserDefinedNoDataCellType(_) => 8
         }
       }
 
-      val (tileCols, tileRows, fileWindows) =
+      val fileWindows =
         RasterReader.listWindows(cols, rows, maxSize, segCols, segRows)
-      val windowBytes = tileCols * tileRows * depth
-      var currentBytes = 0
+      var currentBytes: Long = 0
       val currentPartition = mutable.ArrayBuffer.empty[GridBounds]
       val allPartitions = mutable.ArrayBuffer.empty[Array[GridBounds]]
 
       fileWindows.foreach({ gb =>
+        val windowBytes = gb.sizeLong * depth
+
         // Add the window to the present partition
         if (currentBytes + windowBytes <= partitionBytes) {
           currentPartition.append(gb)
diff --git a/spark/src/main/scala/geotrellis/spark/io/RasterReader.scala b/spark/src/main/scala/geotrellis/spark/io/RasterReader.scala
index debb878465..f3c8a7f246 100644
--- a/spark/src/main/scala/geotrellis/spark/io/RasterReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/RasterReader.scala
@@ -74,12 +74,12 @@ object RasterReader {
 
   def listWindows(
     cols: Int, rows: Int, maxSize: Int, segCols: Int, segRows: Int
-  ): (Int, Int, Array[GridBounds]) = {
+  ): Array[GridBounds] = {
    val colSize: Int = if (maxSize >= segCols) segCols; else best(maxSize, segCols)
    val rowSize: Int = if (maxSize >= segRows) segRows; else best(maxSize, segRows)
    val windows = listWindows(cols, rows, colSize, rowSize)
 
-    (colSize, rowSize, windows)
+    windows
   }
 
   /** List all pixel windows that cover a grid of given size */
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
index 853beac2fe..70ac785744 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
@@ -184,7 +184,6 @@ object HadoopGeoTiffRDD extends LazyLogging {
 
         RasterReader
           .listWindows(cols, rows, options.maxTileSize.getOrElse(1<<10), layout.tileCols, layout.tileRows)
-          ._3
           .map((objectRequest, _))
       }
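
The core of the change is in the `GeoTiffInfoReader` hunk: the old code priced every window at a fixed `tileCols * tileRows * depth`, even though `listWindows` emits smaller windows along the right and bottom edges of the raster, so partition byte estimates were inflated for edge windows; the byte accumulator also moves from `Int` to `Long` to avoid overflow, and `listWindows` now returns just the windows instead of a tuple, which is why the `._3` projections disappear from the S3 and Hadoop call sites. Below is a minimal, self-contained sketch of the resulting partitioning loop; `GridBounds` here is a simplified stand-in for the GeoTrellis type, and `partitionWindows` with its `depth`/`partitionBytes` parameters is illustrative, not the library API.

```scala
// Minimal, self-contained sketch of the byte-budgeted partitioning loop after
// this change. GridBounds is a simplified stand-in for the GeoTrellis type;
// partitionWindows and its parameters are illustrative, not the library API.
import scala.collection.mutable

final case class GridBounds(colMin: Int, rowMin: Int, colMax: Int, rowMax: Int) {
  /** Cell count of this window, as Long to avoid Int overflow on large rasters. */
  def sizeLong: Long = (colMax - colMin + 1).toLong * (rowMax - rowMin + 1).toLong
}

object WindowPartitionSketch {
  /** Greedily group windows so each partition stays within a byte budget.
    * Each window is priced by its own extent, so the smaller edge windows
    * emitted by listWindows no longer get charged a full tile's bytes.
    */
  def partitionWindows(
    windows: Seq[GridBounds],
    depth: Long,          // bytes per cell, derived from the raster's CellType
    partitionBytes: Long  // target upper bound on bytes per partition
  ): Array[Array[GridBounds]] = {
    var currentBytes: Long = 0
    val currentPartition = mutable.ArrayBuffer.empty[GridBounds]
    val allPartitions = mutable.ArrayBuffer.empty[Array[GridBounds]]

    windows.foreach { gb =>
      val windowBytes = gb.sizeLong * depth
      if (currentBytes + windowBytes <= partitionBytes) {
        // The window fits: add it to the open partition.
        currentPartition.append(gb)
        currentBytes += windowBytes
      } else {
        // The window does not fit: seal the open partition and start a new
        // one with this window (an oversized window gets its own partition).
        if (currentPartition.nonEmpty) {
          allPartitions.append(currentPartition.toArray)
          currentPartition.clear()
        }
        currentPartition.append(gb)
        currentBytes = windowBytes
      }
    }
    if (currentPartition.nonEmpty) allPartitions.append(currentPartition.toArray)

    allPartitions.toArray
  }

  def main(args: Array[String]): Unit = {
    // A 528x256 raster split into two full 256x256 windows plus a 16x256
    // edge strip, at 2 bytes per cell.
    val windows = Seq(
      GridBounds(0, 0, 255, 255),    // 131072 bytes
      GridBounds(256, 0, 511, 255),  // 131072 bytes
      GridBounds(512, 0, 527, 255)   //   8192 bytes
    )
    // Budget equal to the whole raster: all three windows now share one
    // partition, whereas fixed per-tile pricing would have charged the edge
    // strip a full tile's 131072 bytes and spilled it into a second one.
    partitionWindows(windows, depth = 2L, partitionBytes = 270336L)
      .zipWithIndex
      .foreach { case (part, i) => println(s"partition $i: ${part.mkString(", ")}") }
  }
}
```

Computing `windowBytes` per window inside the `foreach` (rather than once outside it) is what lets each `GridBounds` contribute its actual footprint, which both tightens the packing and keeps the simpler `Array[GridBounds]` return type honest for the S3 and Hadoop callers.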