
Replace spray with circe (#3006)

* Replace spray-json with circe
moradology authored and pomadchin committed Jun 27, 2019
1 parent 474ed90 commit c5775b11d229383b1fdebe90e3a7288a5e6c5385
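
The change is largely mechanical across the files listed below: spray-json's JsonFormat/RootJsonFormat type class is traded for circe's split Encoder/Decoder pair, both as context bounds on generic methods and as implicit instances on companion objects. A minimal sketch of the target style, using a hypothetical Payload type that is not part of this commit:

import io.circe.{Decoder, Encoder}
import io.circe.syntax._

final case class Payload(name: String, count: Int)

object Payload {
  // Explicit codecs from circe-core; generic derivation is not assumed here.
  implicit val payloadEncoder: Encoder[Payload] =
    Encoder.forProduct2("name", "count")(p => (p.name, p.count))
  implicit val payloadDecoder: Decoder[Payload] =
    Decoder.forProduct2("name", "count")(Payload.apply)
}

// spray-json style:  def store[M: JsonFormat](m: M): String = m.toJson.compactPrint
// circe style:
def store[M: Encoder](m: M): String = m.asJson.noSpaces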
Showing with 2,605 additions and 2,868 deletions.
  1. +3 −6 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerCopier.scala
  2. +9 −10 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerManager.scala
  3. +0 −2 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerMover.scala
  4. +3 −4 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerReader.scala
  5. +5 −8 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerReindexer.scala
  6. +9 −11 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloLayerWriter.scala
  7. +0 −1 accumulo-spark/src/main/scala/geotrellis/spark/store/accumulo/AccumuloRDDReader.scala
  8. +10 −10 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloAttributeStore.scala
  9. +0 −1 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionLayerProvider.scala
  10. +3 −4 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionLayerReader.scala
  11. +27 −30 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloLayerHeader.scala
  12. +1 −3 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloUtils.scala
  13. +4 −5 accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloValueReader.scala
  14. +3 −4 cassandra-spark/src/main/scala/geotrellis/spark/store/cassandra/CassandraLayerCopier.scala
  15. +9 −10 cassandra-spark/src/main/scala/geotrellis/spark/store/cassandra/CassandraLayerManager.scala
  16. +3 −4 cassandra-spark/src/main/scala/geotrellis/spark/store/cassandra/CassandraLayerReader.scala
  17. +6 −8 cassandra-spark/src/main/scala/geotrellis/spark/store/cassandra/CassandraLayerReindexer.scala
  18. +9 −11 cassandra-spark/src/main/scala/geotrellis/spark/store/cassandra/CassandraLayerWriter.scala
  19. +12 −9 cassandra/src/main/scala/geotrellis/store/cassandra/CassandraAttributeStore.scala
  20. +3 −3 cassandra/src/main/scala/geotrellis/store/cassandra/CassandraCollectionLayerReader.scala
  21. +29 −32 cassandra/src/main/scala/geotrellis/store/cassandra/CassandraLayerHeader.scala
  22. +4 −6 cassandra/src/main/scala/geotrellis/store/cassandra/CassandraValueReader.scala
  23. +39 −36 doc-examples/src/main/scala/geotrellis/doc/examples/spark/ShardingKeyIndex.scala
  24. +0 −4 doc-examples/src/main/scala/geotrellis/doc/examples/spark/VectorExamples.scala
  25. +33 −42 doc-examples/src/main/scala/geotrellis/doc/examples/spark/VoxelKey.scala
  26. +14 −7 doc-examples/src/test/scala/geotrellis/doc/examples/spark/ShardingKeyIndexSpec.scala
  27. +8 −21 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveAttributeStore.scala
  28. +8 −12 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerReader.scala
  29. +5 −6 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerWriter.scala
  30. +4 −3 hbase-spark/src/main/scala/geotrellis/spark/store/hbase/HBaseLayerCopier.scala
  31. +10 −9 hbase-spark/src/main/scala/geotrellis/spark/store/hbase/HBaseLayerManager.scala
  32. +4 −3 hbase-spark/src/main/scala/geotrellis/spark/store/hbase/HBaseLayerReader.scala
  33. +7 −8 hbase-spark/src/main/scala/geotrellis/spark/store/hbase/HBaseLayerReindexer.scala
  34. +10 −10 hbase-spark/src/main/scala/geotrellis/spark/store/hbase/HBaseLayerWriter.scala
  35. +11 −9 hbase/src/main/scala/geotrellis/store/hbase/HBaseAttributeStore.scala
  36. +4 −3 hbase/src/main/scala/geotrellis/store/hbase/HBaseCollectionLayerReader.scala
  37. +27 −29 hbase/src/main/scala/geotrellis/store/hbase/HBaseLayerHeader.scala
  38. +4 −5 hbase/src/main/scala/geotrellis/store/hbase/HBaseValueReader.scala
  39. +3 −2 layer/src/main/scala/geotrellis/layer/Implicits.scala
  40. +22 −0 layer/src/main/scala/geotrellis/layer/KeyBounds.scala
  41. +2 −0 layer/src/main/scala/geotrellis/layer/LayoutDefinition.scala
  42. +3 −0 layer/src/main/scala/geotrellis/layer/SpaceTimeKey.scala
  43. +3 −0 layer/src/main/scala/geotrellis/layer/SpatialKey.scala
  44. +3 −0 layer/src/main/scala/geotrellis/layer/TemporalKey.scala
  45. +8 −2 layer/src/main/scala/geotrellis/layer/TileLayerMetadata.scala
  46. +0 −189 layer/src/main/scala/geotrellis/layer/json/Implicits.scala
  47. +0 −92 layer/src/main/scala/geotrellis/layer/json/KeyFormats.scala
  48. +1 −2 project/Dependencies.scala
  49. +6 −3 project/Settings.scala
  50. +0 −1 project/Version.scala
  51. +2 −0 raster/src/main/scala/geotrellis/raster/CellSize.scala
  52. +11 −1 raster/src/main/scala/geotrellis/raster/GridBounds.scala
  53. +4 −0 raster/src/main/scala/geotrellis/raster/RasterExtent.scala
  54. +2 −0 raster/src/main/scala/geotrellis/raster/TileLayout.scala
  55. +59 −53 raster/src/main/scala/geotrellis/raster/io/json/HistogramJsonFormats.scala
  56. +12 −102 raster/src/main/scala/geotrellis/raster/io/json/Implicits.scala
  57. +10 −7 raster/src/test/scala/geotrellis/raster/histogram/StreamingHistogramSpec.scala
  58. +1 −2 raster/src/test/scala/geotrellis/raster/interpolation/InverseDistanceWieghtedSpec.scala
  59. +0 −1 raster/src/test/scala/geotrellis/raster/interpolation/KrigingSpec.scala
  60. +12 −14 raster/src/test/scala/geotrellis/raster/io/json/CellSizeJsonSpec.scala
  61. +10 −12 s3-spark/src/main/scala/geotrellis/spark/store/s3/S3LayerManager.scala
  62. +3 −6 s3-spark/src/main/scala/geotrellis/spark/store/s3/S3LayerReader.scala
  63. +1 −4 s3-spark/src/main/scala/geotrellis/spark/store/s3/S3LayerReindexer.scala
  64. +11 −14 s3-spark/src/main/scala/geotrellis/spark/store/s3/S3LayerWriter.scala
  65. +0 −1 s3-spark/src/main/scala/geotrellis/spark/store/s3/SaveToS3Methods.scala
  66. +0 −2 s3-spark/src/main/scala/geotrellis/spark/store/s3/TemporalGeoTiffS3InputFormat.scala
  67. +1 −0 s3-spark/src/main/scala/geotrellis/spark/store/s3/TemporalMultibandGeoTiffS3InputFormat.scala
  68. +5 −7 s3-spark/src/main/scala/geotrellis/spark/store/s3/cog/S3COGLayerReader.scala
  69. +2 −6 s3-spark/src/main/scala/geotrellis/spark/store/s3/cog/S3COGLayerWriter.scala
  70. +2 −5 s3-spark/src/main/scala/geotrellis/spark/store/s3/geotiff/S3IMGeoTiffAttributeStore.scala
  71. +6 −8 s3-spark/src/main/scala/geotrellis/spark/store/s3/geotiff/S3JsonGeoTiffAttributeStore.scala
  72. +1 −2 s3-spark/src/test/scala/geotrellis/spark/store/s3/S3SpaceTimeSpec.scala
  73. +1 −1 s3-spark/src/test/scala/geotrellis/spark/store/s3/S3SpatialSpec.scala
  74. +1 −1 s3-spark/src/test/scala/geotrellis/spark/store/s3/S3TileFeatureSpaceTimeSpec.scala
  75. +1 −1 s3-spark/src/test/scala/geotrellis/spark/store/s3/S3TileFeatureSpatialSpec.scala
  76. +10 −11 s3/src/main/scala/geotrellis/store/s3/S3AttributeStore.scala
  77. +0 −1 ...ain/scala/geotrellis/store/s3/{S3CollectionLayerProivder.scala → S3CollectionLayerProvider.scala}
  78. +3 −3 s3/src/main/scala/geotrellis/store/s3/S3CollectionLayerReader.scala
  79. +3 −6 s3/src/main/scala/geotrellis/store/s3/S3LayerCopier.scala
  80. +29 −34 s3/src/main/scala/geotrellis/store/s3/S3LayerHeader.scala
  81. +0 −2 s3/src/main/scala/geotrellis/store/s3/S3LayerMover.scala
  82. +4 −7 s3/src/main/scala/geotrellis/store/s3/S3ValueReader.scala
  83. +0 −1 s3/src/main/scala/geotrellis/store/s3/cog/S3COGCollectionLayerProvider.scala
  84. +3 −2 s3/src/main/scala/geotrellis/store/s3/cog/S3COGCollectionLayerReader.scala
  85. +3 −3 s3/src/main/scala/geotrellis/store/s3/cog/S3COGValueReader.scala
  86. +4 −3 spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Output.scala
  87. +2 −3 spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Transform.scala
  88. +0 −12 spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/Implicits.scala
  89. +3 −7 spark-testkit/src/main/scala/geotrellis/spark/testkit/io/PersistenceSpec.scala
  90. +2 −5 spark-testkit/src/main/scala/geotrellis/spark/testkit/io/cog/COGPersistenceSpec.scala
  91. +6 −1 spark/src/main/scala/geotrellis/spark/CollectTileLayerMetadata.scala
  92. +3 −3 spark/src/main/scala/geotrellis/spark/ContextRDD.scala
  93. +5 −0 spark/src/main/scala/geotrellis/spark/Implicits.scala
  94. +5 −7 spark/src/main/scala/geotrellis/spark/pyramid/Pyramid.scala
  95. +14 −15 spark/src/main/scala/geotrellis/spark/store/FilteringLayerReader.scala
  96. +5 −5 spark/src/main/scala/geotrellis/spark/store/GenericLayerCopier.scala
  97. +10 −12 spark/src/main/scala/geotrellis/spark/store/GenericLayerReindexer.scala
  98. +7 −7 spark/src/main/scala/geotrellis/spark/store/LayerReader.scala
  99. +18 −19 spark/src/main/scala/geotrellis/spark/store/LayerWriter.scala
  100. +2 −3 spark/src/main/scala/geotrellis/spark/store/RasterReader.scala
  101. +8 −10 spark/src/main/scala/geotrellis/spark/store/cog/COGLayer.scala
  102. +30 −31 spark/src/main/scala/geotrellis/spark/store/cog/COGLayerReader.scala
  103. +12 −14 spark/src/main/scala/geotrellis/spark/store/cog/COGLayerWriter.scala
  104. +9 −10 spark/src/main/scala/geotrellis/spark/store/file/FileLayerManager.scala
  105. +3 −5 spark/src/main/scala/geotrellis/spark/store/file/FileLayerReader.scala
  106. +1 −6 spark/src/main/scala/geotrellis/spark/store/file/FileLayerReindexer.scala
  107. +9 −12 spark/src/main/scala/geotrellis/spark/store/file/FileLayerWriter.scala
  108. +0 −3 spark/src/main/scala/geotrellis/spark/store/file/FileRDDWriter.scala
  109. +4 −7 spark/src/main/scala/geotrellis/spark/store/file/cog/FileCOGLayerReader.scala
  110. +3 −5 spark/src/main/scala/geotrellis/spark/store/file/cog/FileCOGLayerWriter.scala
  111. +0 −1 spark/src/main/scala/geotrellis/spark/store/file/geotiff/FileGeoTiffLayerReader.scala
  112. +2 −3 spark/src/main/scala/geotrellis/spark/store/file/geotiff/FileIMGeoTiffAttributeStore.scala
  113. +2 −3 spark/src/main/scala/geotrellis/spark/store/file/geotiff/FileJsonGeoTiffAttributeStore.scala
  114. +10 −13 spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopLayerManager.scala
  115. +4 −4 spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopLayerReader.scala
  116. +1 −4 spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopLayerReindexer.scala
  117. +9 −9 spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopLayerWriter.scala
  118. +0 −1 spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopSparkContextMethods.scala
  119. +3 −5 spark/src/main/scala/geotrellis/spark/store/hadoop/Implicits.scala
  120. +2 −2 spark/src/main/scala/geotrellis/spark/store/hadoop/SaveToHadoop.scala
  121. +3 −3 spark/src/main/scala/geotrellis/spark/store/hadoop/SaveToHadoopMethods.scala
  122. +4 −3 spark/src/main/scala/geotrellis/spark/store/hadoop/cog/HadoopCOGLayerReader.scala
  123. +3 −5 spark/src/main/scala/geotrellis/spark/store/hadoop/cog/HadoopCOGLayerWriter.scala
  124. +1 −0 spark/src/main/scala/geotrellis/spark/store/hadoop/cog/HadoopCOGSparkLayerProvider.scala
  125. +2 −4 spark/src/main/scala/geotrellis/spark/store/hadoop/cog/Implicits.scala
  126. +13 −14 spark/src/main/scala/geotrellis/spark/store/hadoop/geotiff/GeoTiffMetadata.scala
  127. +2 −4 spark/src/main/scala/geotrellis/spark/store/hadoop/geotiff/HadoopIMGeoTiffAttributeStore.scala
  128. +7 −6 spark/src/main/scala/geotrellis/spark/store/hadoop/geotiff/HadoopJsonGeoTiffAttributeStore.scala
  129. +369 −1 spark/src/test/resources/cog-layer/attributes/multiband-cog-layer__.__0__.__metadata.json
  130. +187 −186 spark/src/test/resources/cog-layer/attributes/stitch-layer__.__0__.__metadata.json
  131. +0 −1 spark/src/test/scala/geotrellis/spark/InterfaceSpec.scala
  132. +0 −2 spark/src/test/scala/geotrellis/spark/store/AllOnesTestTileFeatureSpec.scala
  133. +0 −1 spark/src/test/scala/geotrellis/spark/store/AllOnesTestTileSpec.scala
  134. +2 −20 spark/src/test/scala/geotrellis/spark/store/AttributeStoreSpec.scala
  135. +0 −1 spark/src/test/scala/geotrellis/spark/store/CoordinateSpaceTimeSpec.scala
  136. +0 −2 spark/src/test/scala/geotrellis/spark/store/CoordinateSpaceTimeTileFeatureSpec.scala
  137. +0 −2 spark/src/test/scala/geotrellis/spark/store/LayerUpdateSpaceTimeTileSpec.scala
  138. +0 −3 spark/src/test/scala/geotrellis/spark/store/cog/COGAttributeStoreSpec.scala
  139. +0 −2 spark/src/test/scala/geotrellis/spark/store/cog/COGLayerMetadataSpec.scala
  140. +10 −11 store/src/main/scala/geotrellis/store/AttributeCaching.scala
  141. +47 −48 store/src/main/scala/geotrellis/store/AttributeStore.scala
  142. +0 −1 store/src/main/scala/geotrellis/store/AttributeStoreProvider.scala
  143. +15 −15 store/src/main/scala/geotrellis/store/CollectionLayerReader.scala
  144. +0 −1 store/src/main/scala/geotrellis/store/CollectionLayerReaderProvider.scala
  145. +3 −4 store/src/main/scala/geotrellis/store/GenericLayerMover.scala
  146. +4 −4 store/src/main/scala/geotrellis/store/LayerCopier.scala
  147. +34 −28 store/src/main/scala/geotrellis/store/LayerHeader.scala
  148. +3 −0 store/src/main/scala/geotrellis/store/LayerId.scala
  149. +10 −11 store/src/main/scala/geotrellis/store/LayerManager.scala
  150. +4 −5 store/src/main/scala/geotrellis/store/LayerMover.scala
  151. +6 −7 store/src/main/scala/geotrellis/store/LayerReindexer.scala
  152. +6 −13 store/src/main/scala/geotrellis/store/LayerType.scala
  153. +3 −2 store/src/main/scala/geotrellis/store/OverzoomingValueReader.scala
  154. +4 −5 store/src/main/scala/geotrellis/store/ValueReader.scala
  155. +18 −16 store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReader.scala
  156. +1 −2 store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReaderProvider.scala
  157. +5 −28 store/src/main/scala/geotrellis/store/cog/COGLayerMetadata.scala
  158. +18 −20 store/src/main/scala/geotrellis/store/cog/COGLayerStorageMetadata.scala
  159. +7 −7 store/src/main/scala/geotrellis/store/cog/COGValueReader.scala
  160. +2 −3 store/src/main/scala/geotrellis/store/cog/OverzoomingCOGValueReader.scala
  161. +2 −17 store/src/main/scala/geotrellis/store/cog/ZoomRange.scala
  162. +1 −2 store/src/main/scala/geotrellis/store/cog/package.scala
  163. +12 −12 store/src/main/scala/geotrellis/store/file/FileAttributeStore.scala
  164. +0 −1 store/src/main/scala/geotrellis/store/file/FileCollectionLayerProvider.scala
  165. +3 −3 store/src/main/scala/geotrellis/store/file/FileCollectionLayerReader.scala
  166. +4 −4 store/src/main/scala/geotrellis/store/file/FileLayerCopier.scala
  167. +27 −33 store/src/main/scala/geotrellis/store/file/FileLayerHeader.scala
  168. +20 −6 store/src/main/scala/geotrellis/store/file/FileLayerMover.scala
  169. +6 −6 store/src/main/scala/geotrellis/store/file/FileValueReader.scala
  170. +4 −5 store/src/main/scala/geotrellis/store/file/cog/FileCOGCollectionLayerReader.scala
  171. +4 −6 store/src/main/scala/geotrellis/store/file/cog/FileCOGValueReader.scala
  172. +10 −9 store/src/main/scala/geotrellis/store/hadoop/HadoopAttributeStore.scala
  173. +4 −6 store/src/main/scala/geotrellis/store/hadoop/HadoopCollectionLayerReader.scala
  174. +3 −4 store/src/main/scala/geotrellis/store/hadoop/HadoopLayerCopier.scala
  175. +29 −32 store/src/main/scala/geotrellis/store/hadoop/HadoopLayerHeader.scala
  176. +3 −4 store/src/main/scala/geotrellis/store/hadoop/HadoopLayerMover.scala
  177. +2 −4 store/src/main/scala/geotrellis/store/hadoop/HadoopValueReader.scala
  178. +2 −1 store/src/main/scala/geotrellis/store/hadoop/Implicits.scala
  179. +3 −6 store/src/main/scala/geotrellis/store/hadoop/cog/HadoopCOGCollectionLayerReader.scala
  180. +3 −5 store/src/main/scala/geotrellis/store/hadoop/cog/HadoopCOGValueReader.scala
  181. +67 −170 store/src/main/scala/geotrellis/store/json/Implicits.scala
  182. +158 −163 store/src/main/scala/geotrellis/store/json/KeyIndexFormats.scala
  183. +56 −58 store/src/test/scala/geotrellis/store/avro/AvroTools.scala
  184. +6 −3 store/src/test/scala/geotrellis/store/json/KeyIndexJsonFormatFactorySpec.scala
  185. +14 −15 store/src/test/scala/geotrellis/store/json/LayerHeaderSpec.scala
  186. +24 −25 store/src/test/scala/geotrellis/store/json/TestKeyIndexRegistrator.scala
  187. +16 −1 vector/src/main/scala/geotrellis/vector/Extent.scala
  188. +75 −61 vector/src/main/scala/geotrellis/vector/io/json/CrsFormats.scala
  189. +65 −57 vector/src/main/scala/geotrellis/vector/io/json/FeatureFormats.scala
  190. +6 −5 vector/src/main/scala/geotrellis/vector/io/json/GeoJson.scala
  191. +1 −3 vector/src/main/scala/geotrellis/vector/io/json/GeoJsonSupport.scala
  192. +165 −174 vector/src/main/scala/geotrellis/vector/io/json/GeometryFormats.scala
  193. +27 −32 vector/src/main/scala/geotrellis/vector/io/json/Implicits.scala
  194. +0 −1 vector/src/main/scala/geotrellis/vector/io/json/JsonCRS.scala
  195. +29 −35 vector/src/main/scala/geotrellis/vector/io/json/JsonFeatureCollection.scala
  196. +42 −40 vector/src/main/scala/geotrellis/vector/io/json/JsonFeatureCollectionMap.scala
  197. +46 −54 vector/src/main/scala/geotrellis/vector/io/json/Style.scala
  198. +4 −3 vector/src/test/scala/spec/geotrellis/vector/ExtentSpec.scala
  199. +1 −2 vector/src/test/scala/spec/geotrellis/vector/interpolation/KrigingVectorSpec.scala
  200. +14 −13 vector/src/test/scala/spec/geotrellis/vector/io/json/FeatureFormatsSpec.scala
  201. +18 −16 vector/src/test/scala/spec/geotrellis/vector/io/json/GeoJsonSpec.scala
  202. +22 −20 vector/src/test/scala/spec/geotrellis/vector/io/json/GeometryFormatsSpec.scala
  203. +12 −10 vector/src/test/scala/spec/geotrellis/vector/io/json/JsonCrsSpec.scala
@@ -24,11 +24,8 @@ import geotrellis.store.avro._
import geotrellis.store.index._
import geotrellis.util._

import org.apache.avro._
import org.apache.spark.SparkContext

import org.apache.spark.rdd.RDD
import spray.json.JsonFormat
import io.circe._

import scala.reflect.ClassTag

@@ -38,9 +35,9 @@ class AccumuloLayerCopier(
getLayerWriter: LayerId => AccumuloLayerWriter
) extends LayerCopier[LayerId] {
def copy[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](from: LayerId, to: LayerId): Unit = {
if (!attributeStore.layerExists(from)) throw new LayerNotFoundError(from)
if (attributeStore.layerExists(to)) throw new LayerExistsError(to)
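
Note how the bounds split by direction in the hunks here and below: read methods ask only for Decoder, the low-level _write path asks only for Encoder, while copy, move, and reindex round-trip metadata and therefore require both. A sketch of that split with hypothetical helper names, assuming Scala 2.12's right-biased Either (the commit itself imports cats.syntax.either._ for the same operations):

import io.circe.{Decoder, Encoder}
import io.circe.parser.parse
import io.circe.syntax._

// Hypothetical helpers: reading needs only a Decoder, writing only an Encoder,
// and a copy that round-trips metadata needs both.
def readMetadata[M: Decoder](raw: String): M =
  parse(raw).flatMap(_.as[M]).fold(throw _, identity)

def writeMetadata[M: Encoder](m: M): String =
  m.asJson.noSpaces

def copyMetadata[M: Encoder: Decoder](raw: String): String =
  writeMetadata(readMetadata[M](raw))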
@@ -28,8 +28,7 @@ import geotrellis.util._

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import spray.json.JsonFormat
import io.circe._

import scala.reflect.ClassTag

@@ -39,30 +38,30 @@ class AccumuloLayerManager(attributeStore: AccumuloAttributeStore, instance: Acc
AccumuloLayerDeleter(attributeStore, instance).delete(id)

def copy[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](from: LayerId, to: LayerId): Unit =
AccumuloLayerCopier(instance).copy[K, V, M](from, to)

def move[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](from: LayerId, to: LayerId): Unit =
AccumuloLayerMover(instance).move[K, V, M](from, to)

def reindex[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](id: LayerId, keyIndexMethod: KeyIndexMethod[K]): Unit =
AccumuloLayerReindexer(instance).reindex[K, V, M](id, keyIndexMethod)

def reindex[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](id: LayerId, keyIndex: KeyIndex[K]): Unit =
AccumuloLayerReindexer(instance).reindex[K, V, M](id, keyIndex)
}
@@ -27,8 +27,6 @@ import geotrellis.util._
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import spray.json.JsonFormat

import scala.reflect.ClassTag

object AccumuloLayerMover {
@@ -27,8 +27,7 @@ import geotrellis.util._
import org.apache.hadoop.io.Text
import org.apache.spark.SparkContext
import org.apache.accumulo.core.data.{Range => AccumuloRange}

import spray.json._
import io.circe._

import scala.reflect._

@@ -38,9 +37,9 @@ class AccumuloLayerReader(val attributeStore: AttributeStore)(implicit sc: Spark
val defaultNumPartitions = sc.defaultParallelism

def read[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Decoder: Component[?, Bounds[K]]
](id: LayerId, tileQuery: LayerQuery[K, M], numPartitions: Int, filterIndexOnly: Boolean) = {
if (!attributeStore.layerExists(id)) throw new LayerNotFoundError(id)

@@ -21,16 +21,13 @@ import geotrellis.store._
import geotrellis.store.accumulo._
import geotrellis.store.avro._
import geotrellis.store.index._
import geotrellis.layer.json._
import geotrellis.spark.store._
import geotrellis.util._

import org.apache.spark.SparkContext

import spray.json.JsonFormat
import io.circe._

import java.time.ZonedDateTime

import scala.reflect.ClassTag

object AccumuloLayerReindexer {
@@ -63,9 +60,9 @@ class AccumuloLayerReindexer(
id.copy(name = s"${id.name}-${ZonedDateTime.now.toInstant.toEpochMilli}")

def reindex[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](id: LayerId, keyIndex: KeyIndex[K]): Unit = {
if (!attributeStore.layerExists(id)) throw new LayerNotFoundError(id)
val tmpId = getTmpId(id)
@@ -85,9 +82,9 @@ class AccumuloLayerReindexer(
}

def reindex[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Decoder: Component[?, Bounds[K]]
](id: LayerId, keyIndexMethod: KeyIndexMethod[K]): Unit = {
if (!attributeStore.layerExists(id)) throw new LayerNotFoundError(id)
val tmpId = getTmpId(id)
@@ -28,10 +28,8 @@ import geotrellis.spark.merge._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging

import org.apache.spark.rdd.RDD

import spray.json._
import io.circe._

import scala.reflect._

@@ -44,9 +42,9 @@ class AccumuloLayerWriter(

// Layer Updating
def overwrite[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]: Mergable
M: Encoder: Decoder: Component[?, Bounds[K]]: Mergable
](
id: LayerId,
rdd: RDD[(K, V)] with Metadata[M]
@@ -55,9 +53,9 @@ class AccumuloLayerWriter(
}

def update[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]: Mergable
M: Encoder: Decoder: Component[?, Bounds[K]]: Mergable
](
id: LayerId,
rdd: RDD[(K, V)] with Metadata[M],
@@ -67,9 +65,9 @@ class AccumuloLayerWriter(
}

private def update[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Encoder: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]: Mergable
M: Encoder: Decoder: Component[?, Bounds[K]]: Mergable
](
id: LayerId,
rdd: RDD[(K, V)] with Metadata[M],
@@ -100,9 +98,9 @@ class AccumuloLayerWriter(

// Layer Writing
protected def _write[
K: AvroRecordCodec: JsonFormat: ClassTag,
K: AvroRecordCodec: Encoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Encoder: Component[?, Bounds[K]]
](id: LayerId, rdd: RDD[(K, V)] with Metadata[M], keyIndex: KeyIndex[K]): Unit = {
val codec = KeyValueRecordCodec[K, V]
val schema = codec.schema
@@ -34,7 +34,6 @@ import org.apache.spark.SparkContext
import scala.collection.JavaConverters._
import scala.reflect.ClassTag


object AccumuloRDDReader {
def read[K: Boundable: AvroRecordCodec: ClassTag, V: AvroRecordCodec: ClassTag](
table: String,
@@ -20,19 +20,19 @@ import geotrellis.layer._
import geotrellis.store._
import geotrellis.store.accumulo.conf.AccumuloConfig

import spray.json._
import spray.json.DefaultJsonProtocol._

import org.apache.accumulo.core.client.{BatchWriterConfig, Connector}
import org.apache.accumulo.core.security.Authorizations
import org.apache.accumulo.core.data._
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.iterators.user.RegExFilter
import org.apache.hadoop.io.Text
import io.circe._
import io.circe.syntax._
import io.circe.parser._
import cats.syntax.either._

import scala.collection.JavaConverters._


object AccumuloAttributeStore {
def apply(connector: Connector, attributeTable: String): AccumuloAttributeStore =
new AccumuloAttributeStore(connector, attributeTable)
@@ -87,29 +87,29 @@ class AccumuloAttributeStore(val connector: Connector, val attributeTable: Strin
} finally deleter.close()
}

def read[T: JsonFormat](layerId: LayerId, attributeName: String): T = {
def read[T: Decoder](layerId: LayerId, attributeName: String): T = {
val values = fetch(Some(layerId), attributeName).toVector

if(values.isEmpty) {
throw new AttributeNotFoundError(attributeName, layerId)
} else if(values.size > 1) {
throw new LayerIOError(s"Multiple attributes found for $attributeName for layer $layerId")
} else {
values.head.toString.parseJson.convertTo[(LayerId, T)]._2
parse(values.head.toString).flatMap(_.as[(LayerId, T)]).valueOr(throw _)._2
}
}

def readAll[T: JsonFormat](attributeName: String): Map[LayerId,T] = {
def readAll[T: Decoder](attributeName: String): Map[LayerId,T] = {
fetch(None, attributeName)
.map { _.toString.parseJson.convertTo[(LayerId, T)] }
.map { r => parse(r.toString).flatMap(_.as[(LayerId, T)]).valueOr(throw _) }
.toMap
}

def write[T: JsonFormat](layerId: LayerId, attributeName: String, value: T): Unit = {
def write[T: Encoder](layerId: LayerId, attributeName: String, value: T): Unit = {
val mutation = new Mutation(layerIdText(layerId))
mutation.put(
new Text(attributeName), new Text(), System.currentTimeMillis(),
new Value((layerId, value).toJson.compactPrint.getBytes)
new Value((layerId, value).asJson.noSpaces.getBytes)
)

connector.write(attributeTable, mutation)
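
The attribute store is where the serialization calls themselves change: value.toJson.compactPrint becomes value.asJson.noSpaces, and s.parseJson.convertTo[T] becomes parse(s).flatMap(_.as[T]).valueOr(throw _), with valueOr supplied by the cats.syntax.either._ import added above. A self-contained round-trip in the same style, substituting a hypothetical attribute value for the real (LayerId, T) pair:

import cats.syntax.either._
import io.circe.parser.parse
import io.circe.syntax._

// Hypothetical attribute payload: a (name, zoom) pair plus a small map of properties.
val stored: String = (("landsat-8", 13), Map("bands" -> 11)).asJson.noSpaces
val loaded: ((String, Int), Map[String, Int]) =
  parse(stored).flatMap(_.as[((String, Int), Map[String, Int])]).valueOr(throw _)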
@@ -23,7 +23,6 @@ import geotrellis.util.UriUtils

import java.net.URI


/**
* Provides [[AccumuloAttributeStore]] instance for URI with `accumulo` scheme.
* ex: `accumulo://[user[:password]@]zookeeper/instance-name[?attributes=table1[&layers=table2]]`
@@ -23,17 +23,16 @@ import geotrellis.util._

import org.apache.accumulo.core.data.{Range => AccumuloRange}
import org.apache.hadoop.io.Text

import spray.json._
import _root_.io.circe._

import scala.reflect._

class AccumuloCollectionLayerReader(val attributeStore: AttributeStore)(implicit instance: AccumuloInstance) extends CollectionLayerReader[LayerId] {

def read[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
K: AvroRecordCodec: Boundable: Decoder: ClassTag,
V: AvroRecordCodec: ClassTag,
M: JsonFormat: Component[?, Bounds[K]]
M: Decoder: Component[?, Bounds[K]]
](id: LayerId, rasterQuery: LayerQuery[K, M], filterIndexOnly: Boolean) = {
if (!attributeStore.layerExists(id)) throw new LayerNotFoundError(id)

@@ -19,7 +19,9 @@ package geotrellis.store.accumulo
import geotrellis.layer._
import geotrellis.store._

import spray.json._
import io.circe._
import io.circe.syntax._
import cats.syntax.either._

case class AccumuloLayerHeader(
keyClass: String,
@@ -31,34 +33,29 @@ case class AccumuloLayerHeader(
}

object AccumuloLayerHeader {
implicit object AccumuloLayerMetadataFormat extends RootJsonFormat[AccumuloLayerHeader] {
def write(md: AccumuloLayerHeader) =
JsObject(
"format" -> JsString(md.format),
"keyClass" -> JsString(md.keyClass),
"valueClass" -> JsString(md.valueClass),
"tileTable" -> JsString(md.tileTable),
"layerType" -> md.layerType.toJson
implicit val accumuloLayerHeaderEncoder: Encoder[AccumuloLayerHeader] =
Encoder.encodeJson.contramap[AccumuloLayerHeader] { obj =>
Json.obj(
"keyClass" -> obj.keyClass.asJson,
"valueClass" -> obj.valueClass.asJson,
"tileTable" -> obj.tileTable.asJson,
"layerType" -> obj.layerType.asJson,
"format" -> obj.format.asJson
)

def read(value: JsValue): AccumuloLayerHeader =
value.asJsObject.getFields("keyClass", "valueClass", "tileTable", "layerType") match {
case Seq(JsString(keyClass), JsString(valueClass), JsString(tileTable), layerType) =>
AccumuloLayerHeader(
keyClass,
valueClass,
tileTable,
layerType.convertTo[LayerType]
)
case Seq(JsString(keyClass), JsString(valueClass), JsString(tileTable)) =>
AccumuloLayerHeader(
keyClass,
valueClass,
tileTable,
AvroLayerType
)
case _ =>
throw new DeserializationException(s"AccumuloLayerHeader expected, got: $value")
}
}
}
implicit val accumuloLayerHeaderDecoder: Decoder[AccumuloLayerHeader] =
Decoder.decodeHCursor.emap { c =>
c.downField("format").as[String].flatMap {
case "accumulo" =>
(c.downField("keyClass").as[String],
c.downField("valueClass").as[String],
c.downField("tileTable").as[String],
c.downField("layerType").as[LayerType]) match {
case (Right(f), Right(kc), Right(t), Right(lt)) => Right(AccumuloLayerHeader(f, kc, t, lt))
case (Right(f), Right(kc), Right(t), _) => Right(AccumuloLayerHeader(f, kc, t, AvroLayerType))
case _ => Left(s"AccumuloLayerHeader expected, got: ${c.focus}")
}
case _ => Left(s"AccumuloLayerHeader expected, got: ${c.focus}")
}.leftMap(_ => s"AccumuloLayerHeader expected, got: ${c.focus}")
}
}
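
For case classes that previously carried hand-written RootJsonFormats, the replacement pattern above is Encoder.encodeJson.contramap on the write side and Decoder.decodeHCursor.emap on the read side. A stripped-down sketch of the same shape, using a hypothetical DemoHeader instead of the real layer header:

import io.circe.{Decoder, Encoder, Json}
import io.circe.syntax._

final case class DemoHeader(keyClass: String, format: String)

object DemoHeader {
  // Write side: build the Json object by hand, then contramap onto the case class.
  implicit val demoHeaderEncoder: Encoder[DemoHeader] =
    Encoder.encodeJson.contramap[DemoHeader] { h =>
      Json.obj("keyClass" -> h.keyClass.asJson, "format" -> h.format.asJson)
    }

  // Read side: walk the cursor and turn any failure into a plain error message.
  implicit val demoHeaderDecoder: Decoder[DemoHeader] =
    Decoder.decodeHCursor.emap { c =>
      (for {
        kc  <- c.downField("keyClass").as[String]
        fmt <- c.downField("format").as[String]
      } yield DemoHeader(kc, fmt)).left.map(_ => s"DemoHeader expected, got: ${c.focus}")
    }
}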
@@ -25,7 +25,6 @@ import org.apache.hadoop.io.Text

import scala.collection.JavaConverters._


object AccumuloUtils {
/**
* Mapping KeyBounds of Extent to SFC ranges will often result in a set of non-contigrious ranges.
@@ -40,7 +39,7 @@ object AccumuloUtils {
* more than one tablet server to participate in the ingestion.
*
* @param tableName The name of the table to be split
* @param accumuloInstnace The Accumulo instance associated with the ingest
* @param accumuloInstance The Accumulo instance associated with the ingest
* @param keyBounds The KeyBounds of the RDD that is being stored in the table
* @param keyIndexer The indexing scheme used to turn keys K into Accumulo keys
* @param count The number of tablets to split the table into
@@ -60,5 +59,4 @@ object AccumuloUtils {

ops.addSplits(tableName, new java.util.TreeSet(splits.asJava))
}

}
