Commit

Add cloud optimized GeoTiff write support
pomadchin committed Dec 8, 2017
1 parent 6dedd02 commit 1d3cb81
Showing 1 changed file with 144 additions and 6 deletions.
@@ -17,15 +17,15 @@
package geotrellis.raster.io.geotiff.writer

import geotrellis.raster.io.geotiff._

import spire.syntax.cfor._

import java.io.ByteArrayOutputStream
import java.io.DataOutputStream
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteOrder

import scala.collection.mutable.ListBuffer

object GeoTiffWriter {
def write(geoTiff: GeoTiffData, path: String): Unit = {
val fos = new FileOutputStream(new File(path))
@@ -65,7 +65,7 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) {
LittleEndianToBytes
}

val overviewsIter = (geoTiff +: geoTiff.overviews).toIterator
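// The main IFD followed by its overviews, from largest to smallest.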
lazy val IFDs: List[GeoTiffData] = geoTiff :: geoTiff.overviews

// Used to immediately write values to our ultimate destination.
var index: Int = 0
@@ -78,6 +78,11 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) {
def writeFloat(value: Float) { writeBytes(toBytes(value)) }
def writeDouble(value: Double) { writeBytes(toBytes(value)) }

private def append(list: List[GeoTiffData]): Unit = {
val listIter = list.toIterator
listIter.foreach(append(_, !listIter.hasNext))
}

private def append(geoTiff: GeoTiffData, last: Boolean = true): Unit = {
val (fieldValues, offsetFieldValueBuilder) = TiffTagFieldValue.collect(geoTiff)
val segments = geoTiff.imageData.segmentBytes
@@ -107,7 +112,7 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) {
}

// Write tag start offset (immediately after this 4 byte integer)
//writeInt(index + 4)
// writeInt(index + 4)

// Compute the offsetFieldValue
val offsetFieldValue = {
@@ -183,7 +188,139 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) {
}
}

def write(): Unit = {
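// Cloud optimized layout: every IFD header and its tag data is written at the
// start of the file (main IFD first, then overviews in decreasing size), and
// all image data follows at the end of the file, smallest overview first.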
private def appendCloudOptimized(list: List[GeoTiffData]): Unit = {
// sorted from the main IFD to the smallest overview
val ifdCount = list.length

val dataOffsets = list.map { geoTiff =>
val (fieldValues, offsetFieldValueBuilder) = TiffTagFieldValue.collect(geoTiff)
val segments = geoTiff.imageData.segmentBytes
val segmentCount = segments.size
val segmentBytesCount = (0 until segmentCount).map(segments.getSegmentByteCount).sum

val tagFieldByteCount = (fieldValues.length + 1) * 12 // Tiff Tag Fields are 12 bytes long; the +1 accounts for the segment offsets tag added below.

val tagDataByteCount = {
var s = 0
cfor(0)(_ < fieldValues.length, _ + 1) { i =>
val len = fieldValues(i).value.length
// If value fits in 4 bytes, we store it in the offset,
// so only count data more than 4 bytes.
if(len > 4) {
s += fieldValues(i).value.length
}
}

// Account for the offsetFieldValue size; each offset is a 4-byte Int.
// A single offset fits into the tag's 4-byte value slot, so it only adds data when there is more than one segment.
if(segmentCount > 1) {
s += (segmentCount * 4)
}

s
}

val imageDataStartOffset =
2 + // Short for number of tags
4 + // Int for next IFD address
tagFieldByteCount + tagDataByteCount

(fieldValues -> offsetFieldValueBuilder, tagFieldByteCount -> tagDataByteCount, imageDataStartOffset -> segmentBytesCount)
}


// IFD image data is collected here while the headers are written, ordered from the smallest overview up to the main IFD.
val imageData = new Array[(SegmentBytes, Int)](ifdCount)

cfor(0)(_ < ifdCount, _ + 1) { i =>
val (geoTiff, value, last) = (list(i), dataOffsets(i), i + 1 == ifdCount)
val (fieldValues, offsetFieldValueBuilder) = value._1
val segments = geoTiff.imageData.segmentBytes
val segmentCount = segments.size

val (tagFieldByteCount, tagDataByteCount) = value._2

// Compute where this IFD's image data starts: the current position, plus this
// IFD's own header size, plus the headers and image data of all smaller IFDs
// that follow (their data is written out before this IFD's data).
val imageDataStartOffset: Int =
dataOffsets.slice(i + 1, dataOffsets.length).foldLeft(index) {
case (acc: Int, (_, _, (absStartOffset: Int, bytesCount: Int))) =>
acc + absStartOffset + bytesCount
} + value._3._1

val offsetFieldValue = {
val offsets = Array.ofDim[Int](segmentCount)
var offset = imageDataStartOffset
segments.getSegments(0 until segmentCount).foreach { case (i, segment) =>
offsets(i) = offset
offset += segment.length
}
offsetFieldValueBuilder(offsets)
}

// Sort the fields by tag code.
val sortedTagFieldValues = (offsetFieldValue :: fieldValues.toList).sortBy(_.tag).toArray

// Write the number of tags
writeShort(sortedTagFieldValues.length)

// Write tag fields, sorted by tag code.
val tagDataStartOffset =
index +
4 + // Int for next IFD address
tagFieldByteCount

var tagDataOffset = tagDataStartOffset

cfor(0)(_ < sortedTagFieldValues.length, _ + 1) { i =>
val TiffTagFieldValue(tag, fieldType, length, value) = sortedTagFieldValues(i)
writeShort(tag)
writeShort(fieldType)
writeInt(length)
if(value.length > 4) {
writeInt(tagDataOffset)
tagDataOffset += value.length
} else {
var i = 0
while(i < value.length) {
writeByte(value(i))
i += 1
}
while(i < 4) {
writeByte(0.toByte)
i += 1
}
}
}

// Write the offset of the next IFD, or 0 to mark this as the last IFD.
if(last) writeInt(0)
else writeInt(tagDataOffset)

assert(index == tagDataStartOffset, s"Writer error: index at $index, should be $tagDataStartOffset")
assert(tagDataOffset == tagDataStartOffset + tagDataByteCount)

// write tag data
cfor(0)(_ < sortedTagFieldValues.length, _ + 1) { i =>
val TiffTagFieldValue(tag, fieldType, length, value) = sortedTagFieldValues(i)
if(value.length > 4) {
writeBytes(value)
}
}

imageData(ifdCount - i - 1) = segments -> segmentCount
}

// Write the image data in reverse order:
// the smaller IFDs' data goes first, the main IFD's data last.
imageData.foreach { case (segments, segmentCount) =>
segments.getSegments(0 until segmentCount).foreach { case (_, segment) =>
writeBytes(segment)
}
}
}

def write(cloudOptimized: Boolean = false): Unit = {
// Write the header that determines the endian
if (geoTiff.imageData.decompressor.byteOrder == ByteOrder.BIG_ENDIAN) {
val m = 'M'.toByte
@@ -202,7 +339,8 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) {
writeInt(index + 4)

// Append all IFDs
overviewsIter.foreach(append(_, !overviewsIter.hasNext))
if(cloudOptimized) appendCloudOptimized(IFDs)
else append(IFDs)

dos.flush()
}
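
A minimal usage sketch of the new flag, assuming a GeoTiffData value named tiff that already carries its overviews (the variable and output path below are hypothetical):

// Write tiff using the cloud optimized layout.
val fos = new FileOutputStream(new File("/tmp/cog.tif"))
val dos = new DataOutputStream(fos)
try new GeoTiffWriter(tiff, dos).write(cloudOptimized = true)
finally dos.close()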
