Add S3COGPersitence spec
pomadchin committed Jan 11, 2018
1 parent 72b93e0 commit 6fb81db
Showing 10 changed files with 169 additions and 347 deletions.
s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala (2 changes: 1 addition & 1 deletion)
@@ -27,7 +27,7 @@ import java.io.{InputStream, ByteArrayInputStream}
import scala.annotation.tailrec
import scala.collection.JavaConverters._

-trait S3Client extends LazyLogging {
+trait S3Client extends LazyLogging with Serializable {

def doesBucketExist(bucket: String): Boolean

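One change worth a note: making S3Client extend Serializable lets client instances travel inside Spark task closures. The COG readers added in this commit pass around a () => S3Client provider, and everything such a closure captures must be serializable before Spark can ship it to executors. Below is a minimal, self-contained sketch of that constraint; the stub types are invented stand-ins, not GeoTrellis classes.

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

// Invented stand-ins mirroring the change above: the trait is Serializable,
// so any implementation can travel inside a serialized closure.
trait StubS3Client extends Serializable {
  def doesBucketExist(bucket: String): Boolean
}

class InMemoryS3Client extends StubS3Client {
  def doesBucketExist(bucket: String): Boolean = true
}

object ClosureSerializationSketch {
  def main(args: Array[String]): Unit = {
    val client: StubS3Client = new InMemoryS3Client
    // The COG readers below take a provider of this shape; serializing the
    // closure also serializes whatever client instance it captures.
    val getClient: () => StubS3Client = () => client

    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    out.writeObject(getClient) // throws NotSerializableException if the client type is not Serializable
    out.close()
  }
}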
@@ -55,7 +55,10 @@ class S3COGLayerReader(
V <: CellGrid: TiffMethods: (? => TileMergeMethods[V]): ClassTag
](id: LayerId, tileQuery: LayerQuery[K, TileLayerMetadata[K]], numPartitions: Int, filterIndexOnly: Boolean) = {
def getKeyPath(zoomRange: ZoomRange, maxWidth: Int): BigInt => String =
-    (index: BigInt) => s"$bucket/${makePath(prefix, Index.encode(index, maxWidth))}.${Extension}"
+    (index: BigInt) =>
+      s"$bucket/$prefix/${id.name}/" +
+        s"${zoomRange.minZoom}_${zoomRange.maxZoom}/" +
+        s"${Index.encode(index, maxWidth)}.${Extension}"

baseRead[K, V](
id = id,
@@ -68,5 +71,14 @@
defaultThreads = defaultThreads
)
}
}

object S3COGLayerReader {
def apply(attributeStore: S3AttributeStore)(implicit sc: SparkContext): S3COGLayerReader =
new S3COGLayerReader(
attributeStore,
attributeStore.bucket,
attributeStore.prefix,
() => attributeStore.s3Client
)
}
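A hedged usage sketch of the new companion constructor follows: the bucket, prefix, layer name, and zoom are invented, the package of the COG classes is assumed, and the read call relies on the implicit TiffMethods and tile-merge instances this branch supplies for SpatialKey/Tile being in scope.

import geotrellis.raster.Tile
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.s3.S3AttributeStore
import geotrellis.spark.io.s3.cog.S3COGLayerReader // package assumed
import org.apache.spark.{SparkConf, SparkContext}

object ReadCOGLayerSketch {
  def main(args: Array[String]): Unit = {
    implicit val sc: SparkContext =
      new SparkContext(new SparkConf().setMaster("local[*]").setAppName("cog-read-sketch"))

    val attributeStore = S3AttributeStore("my-bucket", "my-catalog") // hypothetical bucket/prefix
    // The new apply derives bucket, prefix, and the S3 client from the attribute store.
    val reader = S3COGLayerReader(attributeStore)

    // Matches the read signature shown in the hunk above.
    val layer = reader.read[SpatialKey, Tile](
      id = LayerId("my-layer", 12),
      tileQuery = new LayerQuery[SpatialKey, TileLayerMetadata[SpatialKey]],
      numPartitions = 8,
      filterIndexOnly = false
    )
    println(layer)

    sc.stop()
  }
}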
@@ -86,3 +86,8 @@ class S3COGLayerWriter(
}
}
}

object S3COGLayerWriter {
def apply(attributeStore: S3AttributeStore): S3COGLayerWriter =
new S3COGLayerWriter(attributeStore, () => attributeStore.s3Client)
}
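The writer companion follows the same pattern. A brief construction sketch, with invented bucket and prefix; the write call itself is left out since its signature is not visible in this hunk.

import geotrellis.spark.io.s3.S3AttributeStore
import geotrellis.spark.io.s3.cog.S3COGLayerWriter // package assumed

object WriteCOGLayerSketch {
  def main(args: Array[String]): Unit = {
    val attributeStore = S3AttributeStore("my-bucket", "my-catalog") // hypothetical
    // The new apply reuses the attribute store's own S3 client for writes.
    val writer = S3COGLayerWriter(attributeStore)
    println(writer) // layer writes would follow here via the writer's write methods
  }
}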
@@ -21,14 +21,12 @@ import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.cog._
import geotrellis.spark.io.index._
-import geotrellis.spark.io.s3.S3Client
+import geotrellis.spark.io.s3.{S3AttributeStore, S3Client}
import geotrellis.util._

import spray.json._
import com.amazonaws.services.s3.model.AmazonS3Exception

import scala.reflect.ClassTag

import java.net.URI

class S3COGValueReader(
@@ -46,7 +44,9 @@
V <: CellGrid: TiffMethods
](layerId: LayerId): Reader[K, V] = {
def keyPath(key: K, maxWidth: Int, baseKeyIndex: KeyIndex[K], zoomRange: ZoomRange): String =
-      s"$bucket/$prefix/${Index.encode(baseKeyIndex.toIndex(key), maxWidth)}.${Extension}"
+      s"$bucket/$prefix/${layerId.name}/" +
+        s"${zoomRange.minZoom}_${zoomRange.maxZoom}/" +
+        s"${Index.encode(baseKeyIndex.toIndex(key), maxWidth)}.${Extension}"

baseReader[K, V](
layerId,
Expand All @@ -60,3 +60,10 @@ class S3COGValueReader(
}
}

object S3COGValueReader {
def apply(s3attributeStore: S3AttributeStore): S3COGValueReader =
new S3COGValueReader(s3attributeStore, s3attributeStore.bucket, s3attributeStore.prefix) {
override def s3Client: S3Client = s3attributeStore.s3Client
}
}
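For single-tile reads, the new S3COGValueReader companion builds the reader from the attribute store and overrides s3Client to reuse the store's client. A hedged sketch follows; the names are invented, the package is assumed, and the reader[K, V] entry point plus the implicit TiffMethods[Tile] instance are taken on faith from the surrounding code.

import geotrellis.raster.Tile
import geotrellis.spark.{LayerId, SpatialKey}
import geotrellis.spark.io.s3.S3AttributeStore
import geotrellis.spark.io.s3.cog.S3COGValueReader // package assumed

object ReadSingleCOGValueSketch {
  def main(args: Array[String]): Unit = {
    val attributeStore = S3AttributeStore("my-bucket", "my-catalog") // hypothetical
    val valueReader = S3COGValueReader(attributeStore)

    // reader[K, V] yields a Reader[K, V], per the signature shown above;
    // read(key) then fetches one tile by its SpatialKey.
    val tileReader = valueReader.reader[SpatialKey, Tile](LayerId("my-layer", 12))
    val tile: Tile = tileReader.read(SpatialKey(10, 20))
    println(tile.dimensions)
  }
}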

@@ -52,7 +52,10 @@ class S3CollectionCOGLayerReader(
V <: CellGrid: TiffMethods: (? => TileMergeMethods[V]): ClassTag
](id: LayerId, rasterQuery: LayerQuery[K, TileLayerMetadata[K]], indexFilterOnly: Boolean) = {
def getKeyPath(zoomRange: ZoomRange, maxWidth: Int): BigInt => String =
-    (index: BigInt) => s"$bucket/${makePath(prefix, Index.encode(index, maxWidth))}.${Extension}"
+    (index: BigInt) =>
+      s"$bucket/$prefix/${id.name}/" +
+        s"${zoomRange.minZoom}_${zoomRange.maxZoom}/" +
+        s"${Index.encode(index, maxWidth)}.${Extension}"

baseRead[K, V](
id = id,
@@ -65,3 +68,13 @@
)
}
}

object S3CollectionCOGLayerReader {
def apply(attributeStore: S3AttributeStore): S3CollectionCOGLayerReader =
new S3CollectionCOGLayerReader(
attributeStore,
attributeStore.bucket,
attributeStore.prefix,
() => attributeStore.s3Client
)
}
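Finally, the collection-backed reader gets the same convenience constructor and needs no SparkContext. A hedged sketch with invented names, an assumed package, and the same assumptions about implicit instances as the RDD reader above:

import geotrellis.raster.Tile
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.s3.S3AttributeStore
import geotrellis.spark.io.s3.cog.S3CollectionCOGLayerReader // package assumed

object ReadCOGCollectionSketch {
  def main(args: Array[String]): Unit = {
    val attributeStore = S3AttributeStore("my-bucket", "my-catalog") // hypothetical
    val reader = S3CollectionCOGLayerReader(attributeStore)

    // Matches the read signature shown in the hunk above; runs without Spark.
    val layer = reader.read[SpatialKey, Tile](
      id = LayerId("my-layer", 12),
      rasterQuery = new LayerQuery[SpatialKey, TileLayerMetadata[SpatialKey]],
      indexFilterOnly = false
    )
    println(layer)
  }
}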
