From b52f70c65e226d5f319baa6ab6744c2c5a5e3193 Mon Sep 17 00:00:00 2001 From: Eugene Cheipesh Date: Fri, 30 Apr 2021 11:08:04 -0400 Subject: [PATCH] Replace geowave subproject with GeoTrellis/GeoWave data adapter (#3364) * Replace geowave subproject with GeoTrellis / GeoWave data adapter * Update geowave module description * Add geowave to cassandra tests * Downgrade JTS to 1.16 to avoid bin-compat problem with GeoWave * Downgrade GeoTools version to 23.2 * Fix GeoWave builds * Fix GeoTools deps * Move GeoWave into a separate executor, rm BlockingThreadPool implementation from benchmarks, generate missing headers * Update SBT plugins * Upd GeoWave syntax to match Scala 2.13 Co-authored-by: Grigory Pomadchin --- .circleci/build-and-test-geowave.sh | 6 + .circleci/config.yml | 25 ++ .locationtech/deploy-212.sh | 1 + CHANGELOG.md | 7 +- .../accumulo/AccumuloCollectionReader.scala | 2 +- build.sbt | 26 +- .../cassandra/CassandraCollectionReader.scala | 2 +- docs/guide/module-hierarchy.rst | 2 +- .../spark/gdal/GDALRasterSourceRDDSpec.scala | 6 +- geowave/Makefile | 2 + geowave/README.md | 55 ++++ geowave/benchmark/README.md | 99 ++++++ .../src/main/resources/application.conf | 12 + .../benchmark/src/main/resources/logback.xml | 31 ++ .../geowave/BenchmarkEnvironment.scala | 30 ++ .../geowave/IngestBenchmarkData.scala | 66 ++++ .../geotrellis/geowave/QueryBenchmark.scala | 240 ++++++++++++++ geowave/docker-compose.override.yml | 9 + geowave/docker-compose.yml | 13 + geowave/docs/README.md | 173 ++++++++++ geowave/docs/schemas/delete-message.md | 203 ++++++++++++ geowave/docs/schemas/index-message.md | 116 +++++++ geowave/docs/schemas/ingest-message.md | 149 +++++++++ ...wave.adapter.GeoTrellisDataAdapterProvider | 1 + ...s.geowave.adapter.HandlersRegistryProvider | 1 + ....core.index.persist.PersistableRegistrySpi | 1 + ...re.store.spi.DimensionalityTypeProviderSpi | 1 + .../resources/json/delete-message.schema.json | 108 +++++++ 
.../resources/json/index-message.schema.json | 82 +++++ .../resources/json/ingest-message.schema.json | 61 ++++ .../main/resources/json/message.schema.json | 41 +++ geowave/src/main/resources/reference.conf | 12 + .../GeoTrellisPersistableRegistry.scala | 60 ++++ .../scala/geotrellis/geowave/Implicits.scala | 94 ++++++ .../geowave/adapter/AvroFieldReader.scala | 25 ++ .../geowave/adapter/AvroFieldWriter.scala | 28 ++ .../geowave/adapter/DataTypeRegistry.scala | 35 ++ .../adapter/ElevationFieldHandler.scala | 24 ++ .../adapter/GeoTrellisDataAdapter.scala | 221 +++++++++++++ .../GeoTrellisDataAdapterProvider.scala | 24 ++ .../adapter/GeometryFieldHandler.scala | 24 ++ .../geowave/adapter/IndexFieldHandler.scala | 45 +++ .../adapter/TimestampFieldHandler.scala | 24 ++ .../adapter/geotiff/GeoTiffAdapter.scala | 91 ++++++ .../geotiff/GeoTiffAdapterProvider.scala | 25 ++ .../adapter/geotiff/GeoTiffFieldReader.scala | 26 ++ .../adapter/geotiff/GeoTiffFieldWriter.scala | 26 ++ .../adapter/geotiff}/package.scala | 15 +- .../geotrellis/geowave/adapter/package.scala | 48 +++ .../raster/MulitbandRasterAdapter.scala | 57 ++++ .../raster/MultibandRasterReader.scala | 27 ++ .../raster/MultibandRasterWriter.scala | 27 ++ .../adapter/raster/avro/Implicits.scala | 21 ++ .../adapter/raster/avro/RasterCodec.scala | 47 +++ .../geowave/adapter/raster/avro/package.scala | 19 ++ .../geowave/api/SQueryBuilder.scala | 23 ++ .../geotrellis/geowave/api/package.scala | 27 ++ .../geotrellis/geowave/conf/Implicits.scala | 27 ++ .../geowave/conf/StoreConfiguration.scala | 42 +++ .../geotrellis/geowave/conf/package.scala | 19 ++ .../geowave/dsl/DataAdapterParameters.scala | 51 +++ .../geowave/dsl/DataTypeReader.scala | 33 ++ .../geowave/dsl/DeleteParameters.scala | 76 +++++ .../geowave/dsl/IndexParameters.scala | 92 ++++++ .../geowave/dsl/IngestParameters.scala | 50 +++ .../geotrellis/geowave/dsl/Metadata.scala | 30 ++ .../geowave/dsl/QueryConfiguration.scala | 44 +++ 
.../geotrellis/geowave/dsl/TilingBounds.scala | 28 ++ .../geotrellis/geowave/dsl/VoxelBounds.scala | 28 ++ .../geowave/dsl/VoxelBounds2D.scala | 59 ++++ .../geowave/dsl/VoxelBounds3D.scala | 67 ++++ .../geowave/dsl/VoxelBounds4D.scala | 76 +++++ .../geowave/dsl/VoxelDimensions.scala | 49 +++ .../geowave/dsl/avro/GeometryCodecs.scala | 101 ++++++ .../geowave/dsl/avro/Implicits.scala | 21 ++ .../geowave/dsl/avro/VoxelBoundsCodec.scala | 120 +++++++ .../dsl/avro/VoxelDimensionsCodec.scala | 93 ++++++ .../geotrellis/geowave/dsl/avro/package.scala | 19 ++ .../geowave/dsl/json/Implicits.scala | 88 +++++ .../geowave/dsl/json/JsonValidator.scala | 58 ++++ .../dsl/json/JsonValidatorErrors.scala | 33 ++ .../geotrellis/geowave/dsl/json/package.scala | 19 ++ .../geotrellis/geowave/dsl/package.scala | 61 ++++ .../geowave/dsl/syntax/Implicits.scala | 32 ++ .../geowave/dsl/syntax/package.scala | 19 ++ ...SpatialTemporalElevationIndexBuilder.scala | 27 ++ ...alTemporalElevationIndexTypeProvider.scala | 129 ++++++++ .../SpatialTemporalElevationOptions.scala | 77 +++++ .../geowave/index/dimension/Elevation.scala | 89 +++++ .../index/dimension/ElevationDefinition.scala | 25 ++ .../index/dimension/ElevationReader.scala | 23 ++ .../index/dimension/ElevationWriter.scala | 23 ++ .../geowave/index/field/ElevationField.scala | 116 +++++++ .../query/ExplicitSpatialElevationQuery.scala | 44 +++ ...xplicitSpatialTemporalElevationQuery.scala | 53 +++ .../geowave/ingest/ConfigureIndex.scala | 49 +++ .../geowave/ingest/ExecuteQuery.scala} | 24 +- .../geowave/ingest/IngestGeoTiff.scala | 91 ++++++ .../ingest/IngestGeoTiffMetadata.scala | 43 +++ .../scala/geotrellis/geowave/package.scala | 19 ++ .../geowave/utils/DoubleUtils.scala | 28 ++ .../geowave/utils/GeodesicUtils.scala | 39 +++ .../geotrellis/geowave/utils/ListUtils.scala | 55 ++++ .../geotrellis/geowave/utils/package.scala} | 28 +- .../store/geowave/GeoWaveAttributeStore.scala | 277 ---------------- 
.../geowave/GeoWaveFeatureRDDReader.scala | 121 ------- .../geowave/GeoWaveFeatureRDDWriter.scala | 122 ------- .../store/geowave/GeoWaveLayerReader.scala | 284 ---------------- .../store/geowave/GeoWaveLayerWriter.scala | 304 ------------------ .../spark/store/geowave/GeoWaveUtil.scala | 41 --- .../geowave/SerializablePersistable.scala | 53 --- .../store/kryo/GeoWaveKryoRegistrator.scala | 102 ------ geowave/src/test/resources/logback.xml | 31 ++ .../src/test/resources/raster/all-ones.tif | Bin 0 -> 7040 bytes geowave/src/test/resources/reference.conf | 13 + .../geotrellis/geowave/TestEnvironment.scala | 35 ++ .../GeoTrellisDataAdapterSPISpec.scala | 45 +++ .../adapter/geotiff/GeoTiffAdapterSpec.scala | 153 +++++++++ .../IMMultibandGeoTiffAdapterSpec.scala | 155 +++++++++ ...lTemporalElevationGeoTiffAdapterSpec.scala | 246 ++++++++++++++ .../SpatialTemporalGeoTiffAdapterSpec.scala | 206 ++++++++++++ .../raster/IMMultibandRasterAdapterSpec.scala | 154 +++++++++ .../raster/MultibandRasterAdapterSpec.scala | 153 +++++++++ .../geotrellis/geowave/dsl/MessagesSpec.scala | 173 ++++++++++ .../geowave/dsl/VoxelBoundsSpec.scala | 68 ++++ .../geowave/ingest/IngestGeoTiffSpec.scala | 303 +++++++++++++++++ .../geowave/GeoWaveFeatureRDDReaderSpec.scala | 120 ------- .../store/geowave/GeoWaveSpatialSpec.scala | 123 ------- .../geotrellis/layer/LayoutTileSource.scala | 4 +- project/Dependencies.scala | 39 ++- project/Settings.scala | 103 ++---- project/build.properties | 2 +- project/plugins.sbt | 12 +- .../geotrellis/raster/RasterSource.scala | 8 +- .../geotrellis/raster/crop/CropMethods.scala | 2 +- .../raster/io/geotiff/LazySegmentBytes.scala | 2 +- .../raster/io/geotiff/MultibandGeoTiff.scala | 2 +- .../raster/io/geotiff/SinglebandGeoTiff.scala | 2 +- .../io/geotiff/writer/GeoTiffWriter.scala | 2 +- ...lerSpec.scala => GeoTiffBuilderSpec.scala} | 4 +- .../spark/store/s3/S3RDDReader.scala | 2 +- .../store/s3/S3CollectionReader.scala | 2 +- sbt | 4 +- 
.../geotrellis/spark/RasterSourceRDD.scala | 2 +- .../spark/rasterize/RasterizeRDD.scala | 4 +- .../spark/store/cog/COGLayerReader.scala | 2 +- .../spark/store/file/FileRDDReader.scala | 2 +- .../spark/store/hadoop/HadoopRDDWriter.scala | 2 +- .../geotrellis/spark/RasterRegionSpec.scala | 2 +- .../store/cog/COGCollectionLayerReader.scala | 2 +- .../store/file/FileCollectionReader.scala | 2 +- .../store/hadoop/HadoopCollectionReader.scala | 2 +- .../store/util/BlockingThreadPool.scala | 2 +- .../scala/geotrellis/store/util/IOUtils.scala | 2 +- 154 files changed, 6840 insertions(+), 1739 deletions(-) create mode 100755 .circleci/build-and-test-geowave.sh create mode 100644 geowave/Makefile create mode 100644 geowave/README.md create mode 100644 geowave/benchmark/README.md create mode 100644 geowave/benchmark/src/main/resources/application.conf create mode 100644 geowave/benchmark/src/main/resources/logback.xml create mode 100644 geowave/benchmark/src/main/scala/geotrellis/geowave/BenchmarkEnvironment.scala create mode 100644 geowave/benchmark/src/main/scala/geotrellis/geowave/IngestBenchmarkData.scala create mode 100644 geowave/benchmark/src/main/scala/geotrellis/geowave/QueryBenchmark.scala create mode 100644 geowave/docker-compose.override.yml create mode 100755 geowave/docker-compose.yml create mode 100644 geowave/docs/README.md create mode 100644 geowave/docs/schemas/delete-message.md create mode 100644 geowave/docs/schemas/index-message.md create mode 100644 geowave/docs/schemas/ingest-message.md create mode 100644 geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.GeoTrellisDataAdapterProvider create mode 100644 geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.HandlersRegistryProvider create mode 100644 geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi create mode 100644 
geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi create mode 100644 geowave/src/main/resources/json/delete-message.schema.json create mode 100644 geowave/src/main/resources/json/index-message.schema.json create mode 100644 geowave/src/main/resources/json/ingest-message.schema.json create mode 100644 geowave/src/main/resources/json/message.schema.json create mode 100644 geowave/src/main/resources/reference.conf create mode 100644 geowave/src/main/scala/geotrellis/geowave/GeoTrellisPersistableRegistry.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldReader.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldWriter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/DataTypeRegistry.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/ElevationFieldHandler.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterProvider.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/GeometryFieldHandler.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/IndexFieldHandler.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/TimestampFieldHandler.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterProvider.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldReader.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldWriter.scala rename geowave/src/main/scala/geotrellis/{spark/store/geowave => 
geowave/adapter/geotiff}/package.scala (52%) create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/MulitbandRasterAdapter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterReader.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterWriter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/RasterCodec.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/api/SQueryBuilder.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/api/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/conf/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/conf/StoreConfiguration.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/conf/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/DataAdapterParameters.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/DataTypeReader.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/DeleteParameters.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/IndexParameters.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/IngestParameters.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/Metadata.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/QueryConfiguration.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/TilingBounds.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds.scala create mode 100644 
geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds2D.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds3D.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds4D.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/VoxelDimensions.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/avro/GeometryCodecs.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/avro/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelBoundsCodec.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelDimensionsCodec.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/avro/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/json/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidator.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidatorErrors.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/json/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/syntax/Implicits.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/dsl/syntax/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexBuilder.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexTypeProvider.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationOptions.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/dimension/Elevation.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationDefinition.scala create mode 100644 
geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationReader.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationWriter.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/field/ElevationField.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialElevationQuery.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialTemporalElevationQuery.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/ingest/ConfigureIndex.scala rename geowave/src/{test/scala/geotrellis/spark/GeoWaveTestEnvironment.scala => main/scala/geotrellis/geowave/ingest/ExecuteQuery.scala} (54%) create mode 100644 geowave/src/main/scala/geotrellis/geowave/ingest/IngestGeoTiff.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/ingest/IngestGeoTiffMetadata.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/package.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/utils/DoubleUtils.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/utils/GeodesicUtils.scala create mode 100644 geowave/src/main/scala/geotrellis/geowave/utils/ListUtils.scala rename geowave/src/{test/scala/geotrellis/spark/store/geowave/GeoWaveAttributeStoreSpec.scala => main/scala/geotrellis/geowave/utils/package.scala} (53%) delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveAttributeStore.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReader.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDWriter.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerReader.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerWriter.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveUtil.scala delete 
mode 100644 geowave/src/main/scala/geotrellis/spark/store/geowave/SerializablePersistable.scala delete mode 100644 geowave/src/main/scala/geotrellis/spark/store/kryo/GeoWaveKryoRegistrator.scala create mode 100644 geowave/src/test/resources/logback.xml create mode 100644 geowave/src/test/resources/raster/all-ones.tif create mode 100644 geowave/src/test/resources/reference.conf create mode 100644 geowave/src/test/scala/geotrellis/geowave/TestEnvironment.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterSPISpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/IMMultibandGeoTiffAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalElevationGeoTiffAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalGeoTiffAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/raster/IMMultibandRasterAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/adapter/raster/MultibandRasterAdapterSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/dsl/MessagesSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/dsl/VoxelBoundsSpec.scala create mode 100644 geowave/src/test/scala/geotrellis/geowave/ingest/IngestGeoTiffSpec.scala delete mode 100644 geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReaderSpec.scala delete mode 100644 geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveSpatialSpec.scala rename raster/src/test/scala/geotrellis/raster/io/geotiff/{GeoTiffBuilerSpec.scala => GeoTiffBuilderSpec.scala} (96%) diff --git a/.circleci/build-and-test-geowave.sh b/.circleci/build-and-test-geowave.sh new file mode 100755 index 0000000000..a52b86a07b --- /dev/null +++ 
b/.circleci/build-and-test-geowave.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +.circleci/unzip-rasters.sh + +./sbt -Dsbt.supershell=false "++$SCALA_VERSION" \ + "project geowave" test || { exit 1; } diff --git a/.circleci/config.yml b/.circleci/config.yml index 9104f776cd..9f5ce973d1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -137,6 +137,21 @@ jobs: .circleci/build-and-test-accumulo.sh - save_cache: *save_build_cache + geowave: + parameters: + scala-version: + type: string + executor: executor-cassandra + steps: + - checkout + - restore_cache: *restore_build_cache + - run: + name: Test Cassandra + command: | + export SCALA_VERSION=<< parameters.scala-version >> + .circleci/build-and-test-geowave.sh + - save_cache: *save_build_cache + scaladocs: parameters: scala-version: @@ -223,6 +238,16 @@ workflows: tags: only: /^v.*/ + - geowave: + matrix: + parameters: + scala-version: [ "2.12.13", "2.13.5" ] + filters: + branches: + only: /.*/ + tags: + only: /^v.*/ + - scaladocs: matrix: parameters: diff --git a/.locationtech/deploy-212.sh b/.locationtech/deploy-212.sh index d4a0337337..7c3f8b7645 100755 --- a/.locationtech/deploy-212.sh +++ b/.locationtech/deploy-212.sh @@ -17,6 +17,7 @@ && ./sbt "project hbase-spark" publish -no-colors -J-Drelease=locationtech \ && ./sbt "project cassandra" publish -no-colors -J-Drelease=locationtech \ && ./sbt "project cassandra-spark" publish -no-colors -J-Drelease=locationtech \ + && ./sbt "project geowave" publish -no-colors -J-Drelease=locationtech && ./sbt "project geotools" publish -no-colors -J-Drelease=locationtech \ && ./sbt "project shapefile" publish -no-colors -J-Drelease=locationtech \ && ./sbt "project layer" publish -no-colors -J-Drelease=locationtech \ diff --git a/CHANGELOG.md b/CHANGELOG.md index 41fd2608f4..c84ed0ed89 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - S3LayerDeleter cannot handle over 1000 objects 
to delete [#3371](https://github.com/locationtech/geotrellis/issues/3371) - Drop Scala 2.11 cross compilation [#3259](https://github.com/locationtech/geotrellis/issues/3259) - Fix MosaicRasterSource.tileToLayout behavior [#3338](https://github.com/locationtech/geotrellis/pull/3338) +- Replace geowave subproject with GeoTrellis/GeoWave data adapter [#3364](https://github.com/locationtech/geotrellis/pull/3364) ## [3.5.2] - 2021-02-01 @@ -40,8 +41,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fix `LayoutTileSource` buffer should only be 1/2 a cellsize to avoid going out of bounds and creating `NODATA` values [#3302](https://github.com/locationtech/geotrellis/pull/3302) - Remove unused allocation from CroppedTile [#3297](https://github.com/locationtech/geotrellis/pull/3297) - Fix GeometryCollection::getAll extension method [#3295](https://github.com/locationtech/geotrellis/pull/3295) -- Update gdal-warp-bindings v1.1.1 [#3303](https://github.com/locationtech/geotrellis/pull/3303) - - gdal-warp-bindings 1.1.1 is a bugfix release that addresses a crash when initializing the bindings on MacOS. See: +- Update gdal-warp-bindings v1.1.1 [#3303](https://github.com/locationtech/geotrellis/pull/3303) + - gdal-warp-bindings 1.1.1 is a bugfix release that addresses a crash when initializing the bindings on MacOS. 
See: - https://github.com/geotrellis/gdal-warp-bindings#macos - https://github.com/geotrellis/gdal-warp-bindings/pull/99 @@ -80,7 +81,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - GDALRasterSource works inconsistenly with BitCellType and ByteCellType [#3232](https://github.com/locationtech/geotrellis/issues/3232) - rasterizeWithValue accepts only topologically valid polygons [#3236](https://github.com/locationtech/geotrellis/pull/3236) - Rasterizer.rasterize should be consistent with rasterizeWithValue [#3238](https://github.com/locationtech/geotrellis/pull/3238) -- GeoTrellisRasterSource should return None on empty reads [#3240](https://github.com/locationtech/geotrellis/pull/3240) +- GeoTrellisRasterSource should return None on empty reads [#3240](https://github.com/locationtech/geotrellis/pull/3240) - ArrayTile equals method always returns true if first elements are NaN [#3242](https://github.com/locationtech/geotrellis/issues/3242) - Fixed resource issue with JpegDecompressor that was causing a "too many open files in the system" exception on many parallel reads of JPEG compressed GeoTiffs. 
[#3249](https://github.com/locationtech/geotrellis/pull/3249) - Fix MosaicRasterSource, GDALRasterSource and GeoTiffResampleRasterSource behavior [#3252](https://github.com/locationtech/geotrellis/pull/3252) diff --git a/accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionReader.scala b/accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionReader.scala index 5fe71a4a54..7fb4c80a29 100644 --- a/accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionReader.scala +++ b/accumulo/src/main/scala/geotrellis/store/accumulo/AccumuloCollectionReader.scala @@ -48,7 +48,7 @@ object AccumuloCollectionReader { val codec = KeyValueRecordCodec[K, V] val includeKey = (key: K) => queryKeyBounds.includeKey(key) - val ranges = queryKeyBounds.flatMap(decomposeBounds).toIterator + val ranges = queryKeyBounds.flatMap(decomposeBounds).iterator implicit val ec = executionContext implicit val cs = IO.contextShift(ec) diff --git a/build.sbt b/build.sbt index 1abd4a845f..524f00adb2 100644 --- a/build.sbt +++ b/build.sbt @@ -14,6 +14,7 @@ lazy val root = Project("geotrellis", file(".")) gdal, `gdal-spark`, geotools, + geowave, hbase, `hbase-spark`, layer, @@ -145,7 +146,7 @@ lazy val `hbase-spark` = project .settings(projectDependencies := { Seq((hbase / projectID).value, (spark / projectID).value.exclude("com.google.protobuf", "protobuf-java")) }) .settings(Settings.`hbase-spark`) -lazy val `spark-pipeline` = Project(id = "spark-pipeline", base = file("spark-pipeline")). +lazy val `spark-pipeline` = project. dependsOn(spark, `s3-spark`, `spark-testkit` % "test"). 
settings(Settings.`spark-pipeline`) @@ -155,24 +156,15 @@ lazy val geotools = project ) .settings(Settings.geotools) -/* lazy val geomesa = project - .dependsOn(`spark-testkit` % Test, spark, geotools, `accumulo-spark`) - .settings(Settings.geomesa) - .settings( - scalaVersion := "2.11.12", - crossScalaVersions := Seq("2.11.12") - ) - lazy val geowave = project - .dependsOn( - proj4, raster, layer, store, accumulo, - `spark-testkit` % Test, geotools - ) + .dependsOn(raster, store, `raster-testkit` % Test) .settings(Settings.geowave) - .settings( - scalaVersion := "2.11.12", - crossScalaVersions := Seq("2.11.12") - ) */ + +lazy val `geowave-benchmark` = (project in file("geowave/benchmark")) + .dependsOn(geowave) + .enablePlugins(JmhPlugin) + .settings(Settings.geowaveBenchmark) + .settings(publish / skip := true) lazy val shapefile = project .dependsOn(raster, `raster-testkit` % Test) diff --git a/cassandra/src/main/scala/geotrellis/store/cassandra/CassandraCollectionReader.scala b/cassandra/src/main/scala/geotrellis/store/cassandra/CassandraCollectionReader.scala index dd2a2278a9..0fe4ac424e 100644 --- a/cassandra/src/main/scala/geotrellis/store/cassandra/CassandraCollectionReader.scala +++ b/cassandra/src/main/scala/geotrellis/store/cassandra/CassandraCollectionReader.scala @@ -68,7 +68,7 @@ object CassandraCollectionReader { instance.withSessionDo { session => val statement = session.prepare(query) - IOUtils.parJoin[K, V](ranges.toIterator){ index: BigInt => + IOUtils.parJoin[K, V](ranges.iterator){ index: BigInt => val row = session.execute(statement.bind(index: BigInteger)) if (row.asScala.nonEmpty) { val bytes = row.one().getBytes("value").array() diff --git a/docs/guide/module-hierarchy.rst b/docs/guide/module-hierarchy.rst index 114d813c08..7b2b6cbd05 100644 --- a/docs/guide/module-hierarchy.rst +++ b/docs/guide/module-hierarchy.rst @@ -90,7 +90,7 @@ store `GeoWave `__. 
*Provides:* ``geotrellis.spark.io.geowave.*`` -- Save and load ``RDD``\ s of features to and from GeoWave. +- Provides `GeoTrellisDataAdapter` to store GeoTrellis raster tiles and other Avro encoded records through GeoWave `DataTypeAdapter` interface. geotrellis-hbase ---------------- diff --git a/gdal-spark/src/test/scala/geotrellis/spark/gdal/GDALRasterSourceRDDSpec.scala b/gdal-spark/src/test/scala/geotrellis/spark/gdal/GDALRasterSourceRDDSpec.scala index 75509582c8..24fc40b29b 100644 --- a/gdal-spark/src/test/scala/geotrellis/spark/gdal/GDALRasterSourceRDDSpec.scala +++ b/gdal-spark/src/test/scala/geotrellis/spark/gdal/GDALRasterSourceRDDSpec.scala @@ -246,7 +246,7 @@ class GDALRasterSourceRDDSpec extends AnyFunSpec with TestEnvironment with Befor // println(Thread.currentThread().getName()) // Thread.sleep((Math.random() * 100).toLong) val lts = reprojRS(i) - lts.readAll(lts.keys.take(10).toIterator) + lts.readAll(lts.keys.take(10).iterator) reprojRS(i).source.resolutions dirtyCalls(reprojRS(i).source) @@ -254,7 +254,7 @@ class GDALRasterSourceRDDSpec extends AnyFunSpec with TestEnvironment with Befor // println(Thread.currentThread().getName()) // Thread.sleep((Math.random() * 100).toLong) val lts = reprojRS(i) - lts.readAll(lts.keys.take(10).toIterator) + lts.readAll(lts.keys.take(10).iterator) reprojRS(i).source.resolutions dirtyCalls(reprojRS(i).source) @@ -262,7 +262,7 @@ class GDALRasterSourceRDDSpec extends AnyFunSpec with TestEnvironment with Befor // println(Thread.currentThread().getName()) // Thread.sleep((Math.random() * 100).toLong) val lts = reprojRS(i) - lts.readAll(lts.keys.take(10).toIterator) + lts.readAll(lts.keys.take(10).iterator) reprojRS(i).source.resolutions dirtyCalls(reprojRS(i).source) diff --git a/geowave/Makefile b/geowave/Makefile new file mode 100644 index 0000000000..459c20107e --- /dev/null +++ b/geowave/Makefile @@ -0,0 +1,2 @@ +cqlsh: + docker exec -it $(FOLDER)_cassandra_1 cqlsh \ No newline at end of file diff --git 
a/geowave/README.md b/geowave/README.md new file mode 100644 index 0000000000..e6dccc79b5 --- /dev/null +++ b/geowave/README.md @@ -0,0 +1,55 @@ +# GeoTrellis/GeoWave Connector + +GeoTrellis/GeoWave connector for storing raster and volumetric data. + +- [GeoTrellis/GeoWave Connector](#geotrellisgeowave-connector) + - [Requirements](#requirements) + - [Project Inventory](#project-inventory) + - [Development](#development) + - [!Important](#important) + - [Executing Tests](#executing-tests) +## Requirements + +- Docker Engine 17.12+ +- Docker Compose 1.21+ +- OpenJDK 8 + +## Project Inventory + +- `src` - Main project with `GeoTrellisDataAdapter` enabling storing GeoTrellis types with GeoWave +- `benchmark` - Skeleton for microbenchmarks on GeoWave queries +- `docs` - Overview of GeoWave concepts relevant to index and data adapter usage + +## Development + +### !Important + +After merging PRs / fetching changes from master and other branches, be sure that you have _recreated_ +your dev env. Any changes introduced into interfaces that are present in the `Persistable Registry` +and have `fromBinary` and `toBinary` methods can cause serialization / deserialization issues +in tests, and as a consequence tests would fail with various unpredictable runtime exceptions. + +### Executing Tests + +Tests are dependent on Apache Cassandra, Kafka, ZooKeeper, and Graphite with Grafana. First, ensure +these dependencies are running: + +```bash +docker-compose up -d cassandra +``` + +Now, you can execute tests from project root: + +```bash +$ ./sbt "project geowave" test +... +[info] All tests passed. 
+[success] Total time: 72 s, completed Nov 22, 2019 11:48:25 AM +``` + +When you're done, ensure that the services and networks created by Docker +Compose are torn down: + +```bash +docker-compose down +``` \ No newline at end of file diff --git a/geowave/benchmark/README.md b/geowave/benchmark/README.md new file mode 100644 index 0000000000..8e16674d87 --- /dev/null +++ b/geowave/benchmark/README.md @@ -0,0 +1,99 @@ +# JMH Benchmarks + +## Instructions + +1. Make the following cassandra changes: +```yaml +cassandra: + image: cassandra:3.11 + environment: + - MAX_HEAP_SIZE=4G + - HEAP_NEWSIZE=800M + - CASSANDRA_LISTEN_ADDRESS=127.0.0.1 + mem_limit: 8G + memswap_limit: -1 +``` +2. Ingest data into Cassandra via `sbt "project geowave-benchmark" run` +3. Run benchmarks via `jmh:run -i 5 -wi 5 -f1 -t1 .*QueryBenchmark.*` +It is recommend to run run benchmarks via `jmh:run -i 20 -wi 10 -f1 -t1 .*QueryBenchmark.*` +(to do at least 10 warm up iterations and 20 of actual iterations, just to get a bit more consistent results). + +## Results + +

+jmh:run -i 20 -wi 10 -f 1 -t 1 .*QueryBenchmark.*
+
+88 Entries
+Benchmark                                             Mode  Cnt  Score   Error  Units
+entireSpatialGeometryQuery                             avgt   20  5.278 ± 0.643   s/op
+entireSpatialQuery                                        avgt   20  1.155 ± 0.057   s/op
+entireSpatialTemporalElevationElevationQuery              avgt   20  1.145 ± 0.069   s/op
+entireSpatialTemporalElevationGeometryQuery               avgt   20  1.089 ± 0.030   s/op
+entireSpatialTemporalElevationGeometryTemporalElevationQuery  avgt   20  5.963 ± 0.358   s/op
+entireSpatialTemporalElevationGeometryTemporalQuery       avgt   20  1.093 ± 0.042   s/op
+entireSpatialTemporalElevationQuery                       avgt   20  1.117 ± 0.033   s/op
+entireSpatialTemporalElevationTemporalQuery               avgt   20  1.080 ± 0.029   s/op
+entireSpatialTemporalGeometryQuery                        avgt   20  1.117 ± 0.039   s/op
+entireSpatialTemporalGeometryTemporalQuery             avgt   20  4.223 ± 0.213   s/op
+entireSpatialTemporalQuery                                avgt   20  1.072 ± 0.036   s/op
+entireSpatialTemporalTemporalQuery                        avgt   20  1.110 ± 0.039   s/op
+
+328 Entries
+Benchmark                                             Mode  Cnt   Score   Error  Units
+entireSpatialGeometryQuery                            avgt   20   4.705 ± 0.146   s/op
+entireSpatialQuery                                       avgt   20   5.249 ± 0.503   s/op
+entireSpatialTemporalElevationElevationQuery             avgt   20   4.919 ± 0.310   s/op
+entireSpatialTemporalElevationGeometryQuery              avgt   20   4.688 ± 0.251   s/op
+entireSpatialTemporalElevationGeometryTemporalElevationQuery  avgt   20  15.801 ± 6.629   s/op
+entireSpatialTemporalElevationGeometryTemporalQuery      avgt   20   5.212 ± 0.467   s/op
+entireSpatialTemporalElevationQuery                      avgt   20   5.256 ± 1.107   s/op
+entireSpatialTemporalElevationTemporalQuery              avgt   20   4.878 ± 0.324   s/op
+entireSpatialTemporalGeometryQuery                       avgt   20   4.760 ± 0.498   s/op
+entireSpatialTemporalGeometryTemporalQuery            avgt   20   4.272 ± 0.126   s/op
+entireSpatialTemporalQuery                               avgt   20   4.553 ± 0.275   s/op
+entireSpatialTemporalTemporalQuery                       avgt   20   4.736 ± 0.290   s/op
+```
+
+## Interpretation:
+
+The index type does affect the query performance.
+The more dimensions there are defined for the index, the more ranges
+would be generated for the SFC and the more range requests would be sent to Cassandra.
+All ranged queries are marked as bold in benchmark results, all other benchmarks generate
+full scan queries.
+
+Full scan by a three dimensional index is more expensive than by a single
+or two dimensional index. The more dimensions SFC has, the more ranges would be generated.
+
+These benchmarks are not representative since they were done with a local instance of Cassandra
+and demonstrate only the local relative performance that shows how the Query performance
+depends on the index type and the amount of data. In fact it is a Cassandra instance benchmark,
+though it can give some general sense of how index and query types affect the performance.
+
+This benchmark measures in fact only full table scans (done via multiple ranged select queries or
+via a single select).
+
+In the `entireSpatialTemporalElevationGeometryTemporalElevationQuery` case the results
+are a bit high: too many range queries are generated and it is hard for a single Cassandra instance
+to handle them.
+ +### Legend: +- `entireSpatial({Temporal|TemporalElevation})` performs a full table scan: + ```genericsql + SELECT * FROM QueryBench.indexName; + ``` +- In all cases where the query contains not all the index dimensions + (for instance a spatial query only from the spatial temporal indexed table), + GeoWave performs a full table scan: + ```genericsql + SELECT * FROM QueryBench.indexName; + ``` +- In all cases where the query contains all the index dimensions defined for the table, + GeoWave performs multiple ranged queries (number of SFC splits depends on the index dimensionality), + **benchmarks that generate such queries are marked as bold in the JMH report**: + ```genericsql + SELECT * FROM QueryBench.indexName + WHERE partition=:partition_val + AND adapter_id IN :adapter_id_val + AND sort>=:sort_min AND sort<:sort_max; + ``` \ No newline at end of file diff --git a/geowave/benchmark/src/main/resources/application.conf b/geowave/benchmark/src/main/resources/application.conf new file mode 100644 index 0000000000..206db2de56 --- /dev/null +++ b/geowave/benchmark/src/main/resources/application.conf @@ -0,0 +1,12 @@ +geotrellis.geowave.connection.store { + data-store-type = "cassandra" + options = { + "contactPoints": "localhost", + "contactPoints": ${?CASSANDRA_HOST}, + "gwNamespace" : "geotrellis" + } +} + +geotrellis.blocking-thread-pool { + threads = default +} diff --git a/geowave/benchmark/src/main/resources/logback.xml b/geowave/benchmark/src/main/resources/logback.xml new file mode 100644 index 0000000000..053890d2d2 --- /dev/null +++ b/geowave/benchmark/src/main/resources/logback.xml @@ -0,0 +1,31 @@ + + + + + + %white(%d{HH:mm:ss.SSS}) %highlight(%-5level) %cyan(%logger{50}) - %msg %n + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/geowave/benchmark/src/main/scala/geotrellis/geowave/BenchmarkEnvironment.scala b/geowave/benchmark/src/main/scala/geotrellis/geowave/BenchmarkEnvironment.scala new file mode 100644 index 
0000000000..d615f01b19 --- /dev/null +++ b/geowave/benchmark/src/main/scala/geotrellis/geowave/BenchmarkEnvironment.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave + +import cats.effect.{ContextShift, IO, Timer} +import geotrellis.store.util.BlockingThreadPool + +import scala.util.Properties + +trait BenchmarkEnvironment { + val kafka: String = Properties.envOrElse("KAFKA_HOST", "localhost:9092") + val cassandra: String = Properties.envOrElse("CASSANDRA_HOST", "localhost") + + implicit val contextShift: ContextShift[IO] = IO.contextShift(BlockingThreadPool.executionContext) + implicit val timer: Timer[IO] = IO.timer(BlockingThreadPool.executionContext) +} diff --git a/geowave/benchmark/src/main/scala/geotrellis/geowave/IngestBenchmarkData.scala b/geowave/benchmark/src/main/scala/geotrellis/geowave/IngestBenchmarkData.scala new file mode 100644 index 0000000000..4f04d0baf3 --- /dev/null +++ b/geowave/benchmark/src/main/scala/geotrellis/geowave/IngestBenchmarkData.scala @@ -0,0 +1,66 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.index.SpatialTemporalElevationIndexBuilder +import org.locationtech.geowave.core.geotime.index.api.{SpatialIndexBuilder, SpatialTemporalIndexBuilder} +import org.locationtech.geowave.core.store.api.{DataStoreFactory, Writer} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} +import org.locationtech.geowave.datastore.cassandra.util.SessionPool +import cats.syntax.flatMap._ +import cats.syntax.parallel._ +import cats.instances.list._ +import cats.effect.IO +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter +import geotrellis.raster.io.geotiff.GeoTiff +import geotrellis.raster.MultibandTile +import geotrellis.store.util.BlockingThreadPool + +object IngestBenchmarkData extends BenchmarkEnvironment { + def main(args: Array[String]): Unit = { + val n: Int = args.headOption.map(_.toInt).getOrElse(20) + + val spatialIndex = new SpatialIndexBuilder().createIndex + val spatialTemporalIndex = new SpatialTemporalIndexBuilder().createIndex + val spatialTemporalDepthIndex = new SpatialTemporalElevationIndexBuilder().createIndex + + val dataTypeAdapter = new GeoTiffAdapter("QueryBench".typeName) + val geowaveDataStore = DataStoreFactory.createDataStore(new CassandraRequiredOptions(cassandra, "QueryBench", new CassandraOptions())) + geowaveDataStore.addType(dataTypeAdapter, spatialIndex, spatialTemporalIndex, spatialTemporalDepthIndex) + + val data: IO[List[GeoTiff[MultibandTile]]] = ??? 
+ val result = (data >>= { tiles => + tiles.map { tile => IO { + val indexWriter: Writer[GeoTiff[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(tile) finally if (indexWriter != null) indexWriter.close() + tile + } }.parSequence }).unsafeRunSync() + + val session = SessionPool.getInstance().getSession(cassandra) + val cluster = session.getCluster + session.close() + cluster.close() + BlockingThreadPool.pool.shutdown() + + println(Console.RED) + println("-----------------------------") + println(s"Ingested Items: ${result.length}") + println("-----------------------------") + println(Console.RESET) + } +} diff --git a/geowave/benchmark/src/main/scala/geotrellis/geowave/QueryBenchmark.scala b/geowave/benchmark/src/main/scala/geotrellis/geowave/QueryBenchmark.scala new file mode 100644 index 0000000000..7b736b261d --- /dev/null +++ b/geowave/benchmark/src/main/scala/geotrellis/geowave/QueryBenchmark.scala @@ -0,0 +1,240 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.index.SpatialTemporalElevationIndexBuilder +import geotrellis.geowave.index.dimension.ElevationDefinition +import geotrellis.geowave.index.query.ExplicitSpatialTemporalElevationQuery +import geotrellis.geowave.adapter.TypeName +import geotrellis.geowave.api._ +import geotrellis.vector._ +import geotrellis.raster.io.geotiff.GeoTiff +import geotrellis.raster.MultibandTile +import geotrellis.store.util.BlockingThreadPool + +import org.locationtech.geowave.core.geotime.index.api.{SpatialIndexBuilder, SpatialTemporalIndexBuilder} +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Index} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} +import org.locationtech.geowave.core.geotime.store.query.{ExplicitSpatialQuery, ExplicitSpatialTemporalQuery} +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.datastore.cassandra.util.SessionPool +import org.locationtech.geowave.core.geotime.index.dimension.{SimpleTimeDefinition, TimeDefinition} +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.{ConstraintData, ConstraintSet, ConstraintsByClass} +import org.openjdk.jmh.annotations._ + +import scala.collection.JavaConverters._ +import java.util.concurrent.TimeUnit +import java.time.{LocalDate, ZoneOffset} +import java.util.Date + +@BenchmarkMode(Array(Mode.AverageTime)) +@State(Scope.Benchmark) +@OutputTimeUnit(TimeUnit.SECONDS) +@Timeout(time = 10, timeUnit = TimeUnit.MINUTES) +class QueryBenchmark extends BenchmarkEnvironment { + var uri: String = _ + + var spatialIndex: Index = _ + var spatialTemporalIndex: Index = _ + var spatialTemporalElevationIndex: Index = _ + + var typeName: 
TypeName = _ + var geowaveDataStore: DataStore = _ + + var entireGeometry: Geometry = _ + + @Setup(Level.Trial) + def setupData(): Unit = { + spatialIndex = new SpatialIndexBuilder().createIndex + spatialTemporalIndex = new SpatialTemporalIndexBuilder().createIndex + spatialTemporalElevationIndex = new SpatialTemporalElevationIndexBuilder().createIndex + + typeName = "BenchType".typeName + geowaveDataStore = DataStoreFactory.createDataStore( + new CassandraRequiredOptions(cassandra, "BenchKeyspace", new CassandraOptions())) + } + + @TearDown(Level.Trial) + def tearDown(): Unit = { + val session = SessionPool.getInstance().getSession(cassandra) + val cluster = session.getCluster + session.close() + cluster.close() + BlockingThreadPool.pool.shutdown() + } + + private def indexQuery(indexName: String) = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(typeName.value) + .indexName(indexName) + + private def spatialQuery(indexName: String) = { + val sq = new ExplicitSpatialQuery(entireGeometry) + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(typeName.value) + .indexName(indexName) + .constraints(sq) + } + + private def spatialTemporalQuery(indexName: String) = { + val date = Date.from(LocalDate.of(2000, 1, 1).atStartOfDay.toInstant(ZoneOffset.UTC)) + val sq = new ExplicitSpatialTemporalQuery(date, date, entireGeometry) + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(typeName.value) + .indexName(indexName) + .constraints(sq) + } + + private def spatialTemporalElevationQuery(indexName: String) = { + val minDate = Date.from(LocalDate.ofYearDay(1970, 1).atStartOfDay(ZoneOffset.UTC).toInstant) + val maxDate = Date.from(LocalDate.ofYearDay(2010, 1).atStartOfDay(ZoneOffset.UTC).toInstant) + val minElevation = 0d + val maxElevation = 25000d + val sq = ExplicitSpatialTemporalElevationQuery(minElevation, maxElevation, minDate, maxDate, entireGeometry) + + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + 
.addTypeName(typeName.value) + .indexName(indexName) + .constraints(sq) + } + + private def temporalQuery(indexName: String) = { + val timeRange = new NumericRange( + LocalDate.ofYearDay(1970, 1).atStartOfDay(ZoneOffset.UTC).toInstant.toEpochMilli.toDouble, + LocalDate.ofYearDay(2010, 1).atStartOfDay(ZoneOffset.UTC).toInstant.toEpochMilli.toDouble + ) + val tc = new BasicQueryByClass(new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(timeRange, false), + classOf[TimeDefinition], + classOf[SimpleTimeDefinition] + ) + )) + + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(typeName.value) + .indexName(indexName) + .constraints(tc) + } + + private def elevationQuery(indexName: String) = { + val dc = new BasicQueryByClass(new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(new NumericRange(0d, 25000d), false), + classOf[ElevationDefinition] + ) + )) + + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(typeName.value) + .indexName(indexName) + .constraints(dc) + } + + @Benchmark + def entireSpatialQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(indexQuery(spatialIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(indexQuery(spatialTemporalIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(indexQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialGeometryQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialQuery(spatialIndex.getName).build) + iter.asScala.toList + } + + 
@Benchmark + def entireSpatialTemporalGeometryQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialQuery(spatialTemporalIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationGeometryQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalGeometryTemporalQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialTemporalQuery(spatialTemporalIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationGeometryTemporalQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialTemporalQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationGeometryTemporalElevationQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(spatialTemporalElevationQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalTemporalQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(temporalQuery(spatialTemporalIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationTemporalQuery(): List[GeoTiff[MultibandTile]] = { + val iter: CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(temporalQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } + + @Benchmark + def entireSpatialTemporalElevationElevationQuery(): List[GeoTiff[MultibandTile]] = { + val iter: 
CloseableIterator[GeoTiff[MultibandTile]] = + geowaveDataStore.query(elevationQuery(spatialTemporalElevationIndex.getName).build) + iter.asScala.toList + } +} diff --git a/geowave/docker-compose.override.yml b/geowave/docker-compose.override.yml new file mode 100644 index 0000000000..7260463a52 --- /dev/null +++ b/geowave/docker-compose.override.yml @@ -0,0 +1,9 @@ +version: "2.4" +services: + cassandra: + ports: + - "7199:7199" + - "7000:7000" + - "7001:7001" + - "9160:9160" + - "9042:9042" diff --git a/geowave/docker-compose.yml b/geowave/docker-compose.yml new file mode 100755 index 0000000000..5b8d4d06aa --- /dev/null +++ b/geowave/docker-compose.yml @@ -0,0 +1,13 @@ +version: "2.4" +services: + cassandra: + image: cassandra:3.11 + environment: + - MAX_HEAP_SIZE=500M + - HEAP_NEWSIZE=100M + - CASSANDRA_LISTEN_ADDRESS=127.0.0.1 + mem_limit: 1G + memswap_limit: -1 + # Uncomment for benchmarks to avoid re-ingesting for each run + # volumes: + # - /data/cassandra:/var/lib/cassandra diff --git a/geowave/docs/README.md b/geowave/docs/README.md new file mode 100644 index 0000000000..cb9a8a4fa9 --- /dev/null +++ b/geowave/docs/README.md @@ -0,0 +1,173 @@ +# Index + +This document covers the concepts and practice behind creating a custom data adapter and index. + +## DataTypeAdapter + +GeoWave [DataTypeAdapter](https://locationtech.github.io/geowave/devguide.html#adapters) is responsible for + +* Encoding/Decoding records +* Extracting index values from records +* Extracting unique data id from records + + +In GeoWave, `Adapter` is an object that can extract index and data values from the input entry and can +also write / read data type to / from the data base. Each adapter has a list of `Field Handler`s. + +`Field handler` is a special function that extracts values from the entry. For example, it can extract `Extent` or `Time` from a `MultibandGeoTiff`. + +Each `Adapter` has a name, that should be unique per Cassandra `namespace`. 
+In this project all adapters extend [`GeoTrellisDataAdapter`] which implements some of the common behavior.
+
+### DataTypeAdapter Lifetime
+
+In order to use the adapter it needs to be bound with one or more instances of an index.
+
+```scala
+val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(
+  new CassandraRequiredOptions("cassandraHost", "my_keyspace", new CassandraOptions()))
+
+// Create an adapter that can extract Spatial, Temporal and Elevation values from GeoTiff for indexing.
+// The adapter also has a name ("GeoTiffTiles"), which should be unique in the Cassandra namespace.
+val dataTypeAdapter = new GeoTiffAdapter("GeoTiffTiles".typeName)
+val index: Index = new SpatialDimensionalityTypeProvider.SpatialIndexBuilder().createIndex
+
+// Add the newly constructed adapter to the data store. The adapter along with all handlers, primitives,
+// and the index, will be serialized and stored in the DB.
+geowaveDataStore.addType(dataTypeAdapter, index)
+
+val writer = geowaveDataStore.createWriter[GeoTiff[MultibandTile]]("GeoTiffTiles")
+val tiff: GeoTiff[MultibandTile] = ???
+writer.write(tiff)
+writer.close()
+```
+
+During the call to the `addType` method the adapter and provided indices are serialized and stored to the database.
+From that point on they are functionally immutable. Both readers and writers are created from the type name.
+That process reads the serialized adapter from the database and uses it to produce reader/writer instances.
+
+Each index is associated with a table in Cassandra. Writing to a given type will result in duplicate records being written to each index table.
+During query the appropriate table will be selected based on query parameters.
+
+When multiple types are using the same index they are written to the same table.
+That is, a single GeoWave table is able to contain data from multiple adapters.
+
+#### The internal DB structure
+
+The structures in this section are generated by the command above.
+As a result, the following structure in Cassandra would be generated:
+
+```sh
+docker-compose exec cassandra cqlsh
+```
+
+```
+cqlsh> describe tables;
+
+Keyspace my_keyspace
+---------------------------
+index_geowave_metadata sp_temp_dpt_idx_balanced_year_2147483647
+aim_geowave_metadata internal_adapter_geowave_metadata
+adapter_geowave_metadata
+```
+
+- `index_geowave_metadata` - stores `Index` instances
+- `aim_geowave_metadata` - stores `Adapter` to `Index` mapping
+- `adapter_geowave_metadata` - stores `Adapter` instances
+- `sp_temp_dpt_idx_balanced_year_2147483647` - a table created for the index; all records that are indexed using this index would be placed into this table.
+- `internal_adapter_geowave_metadata` - stores `Adapter Internal Adapter Mappings` within a Cassandra table for GeoWave metadata
+
+The conclusion:
+
+1. Index is unique and is created based on the input options by default. It has a table that is associated with this index (`sp_temp_dpt_idx_balanced_year_2147483647`).
+
+2. The default index name is generated based on the input index configuration. For example, `sp_temp_dpt_idx_balanced_year_2147483647` means a spatial temporal index with a per year binning and a max elevation = 2147483647. It is possible to override the default name with any custom unique string.
+
+3. Both Index and Adapter instances are serialized and stored in the DB. This is a design constraint from GeoWave which forces a single adapter and index per table. Once the index table is created it cannot be changed.
+
+4. An `Adapter` can be associated with more than one `Index`. `Adapter Name` refers to both an _adapter_ and all _indexes it can operate with_.
+
+5. Adapter and all the indices related to it are called `type` in GeoWave.
+
+6. All data ingested using different `adapters` but the _same_ `index` would be stored in the same table.
+
+Note that in the following sections of this document, `typeName` is the adapter name which references both the data type and all the indices related to it.
+
+The `sp_temp_dpt_idx_balanced_year_2147483647` table schema looks like this:
+
+```
+partition | adapter_id | data_id | .. other configurable index fields .. | value
+```
+
+Here `partition` means a Space Filling Curve (SFC) index, `adapter_id` means the adapter id,
+`data_id` is the data identifier that is required to perform a secondary filtering in case
+the SFC is not precise enough, and `value` is the actual value stored.
+
+### TypeName
+
+Each `DataTypeAdapter` instance is identified by its `TypeName`.
+As seen above a type actually refers to a combination of adapter, indices and their configuration.
+The name used is purely an identifier and does not have any restrictions.
+
+### DataId
+
+`DataTypeAdapter` must be able to produce a data id as `Array[Byte]` for records it supports.
+This data id must be unique per-type, per-table.
+Writing records to Cassandra with the same id will produce additional rows, but they will be de-duplicated during query time.
+
+### Field Handlers
+
+In order for your data type to be indexed there has to be a way to extract the position of the record with regard to the index dimensions; field handlers provide this functionality.
+The matching happens by field name. Each extracted coordinate is stored in `CommonIndexModel` and need not be stored again as part of the field encoding if it is not useful.
+
+### PersistableRegistrySpi
+
+Each adapter and each handler must be registered with GeoWave `PersistableRegistrySpi`.
+Failure to do so will result in various null pointer exceptions when reading or writing with that adapter.
+All instances are persisted when first registered and are subsequently loaded from there each time a reader/writer is created.
+
+
+## Index
+
+GeoWave [indices](https://locationtech.github.io/geowave/devguide.html#indices) provide the core mechanism to store and query multidimensional data. The index implementation is abstracted from `DataTypeAdapter` such that it is possible to use a single implementation of an index with multiple data types.
+
+### DimensionalityTypeProviderSpi
+
+When creating a new index you should implement the `DimensionalityTypeProviderSpi` interface.
+This interface provides both the plugin discoverability for the index as well as a way to configure it.
+A range of functionality and configuration is provided.
+The best way to dive into it is to inspect the `createIndex` method in existing implementations of this SPI.
+
+See: `SphericalIndexTypeProvider.scala`
+
+### NumericDimensionDefinition
+
+An index must provide a list of dimensions that it will range over.
+For most "extra" dimensions `BasicDimensionDefinition` is sufficient.
+
+See: `ElevationDefinition.scala`
+
+### NumericDimensionField
+
+DimensionField is what provides the `IndexModel` with a way to read/write field values; it relies on `DimensionDefinition`.
+Critically it is identified by its `fieldName`. The field name must match the field name of the data type adapter field handler.
+This is the core binding between implementations of `DataTypeAdapter` and `Index`.
+The exact mechanism can be seen in the implementation of `GeoTrellisDataAdapter.encode`.
+
+See: `ElevationField.scala`
+
+### Name
+
+Index name must include enough information to avoid conflicts in its configuration.
+Index configuration allows configuring things like dimension precision and value range.
+These parameters affect the translation of dimension field values to their SFC encoded values.
+Note that SFC values across different parameter values are not comparable.
+In order to avoid such conflicts the index name is used as the table name.
+
+### Options
+
+`DimensionalityTypeProviderSpi` is specified in conjunction with `CommonSpatialOptions`.
+These serve double duty as command line option parser for GeoWave CLI, but are usable in other context. +Existing spatial and temporal indices have wealth of configurations that should be explored by the user. + +See: [CommonIndexOptions](https://github.com/locationtech/geowave/blob/v1.0.0/core/geotime/src/main/java/org/locationtech/geowave/core/geotime/ingest/CommonSpatialOptions.java) diff --git a/geowave/docs/schemas/delete-message.md b/geowave/docs/schemas/delete-message.md new file mode 100644 index 0000000000..8d89f3d41c --- /dev/null +++ b/geowave/docs/schemas/delete-message.md @@ -0,0 +1,203 @@ +## The Root Schema Type + +`object` ([The Root Schema](delete-message.md)) + +# The Root Schema Properties + +| Property | Type | Required | Nullable | Defined by | +| :---------------------- | -------- | -------- | -------------- | :----------------------------------------------------------------------------------------------------------------------------- | +| [typeName](#typeName) | `string` | Required | cannot be null | [The Root Schema](delete-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") | +| [indexName](#indexName) | `string` | Required | cannot be null | [The Root Schema](delete-message-properties-the-indexname-schema.md "\#/properties/indexName#/properties/indexName") | +| [geometry](#geometry) | `object` | Optional | can be null | [The Root Schema](delete-message-properties-the-geometry-schema.md "\#/properties/geometry#/properties/geometry") | +| [time](#time) | `object` | Optional | can be null | [The Root Schema](delete-message-properties-the-time-schema.md "\#/properties/time#/properties/time") | +| [elevation](#elevation) | `object` | Optional | can be null | [The Root Schema](delete-message-properties-the-elevation-schema.md "\#/properties/elevation#/properties/elevation") | +| [compareOp](#compareOp) | `string` | Optional | can be null | [The Root Schema](delete-message-properties-the-compareop-schema.md 
"\#/properties/compareOp#/properties/compareOp") | +| [namespace](#namespace) | `string` | Optional | can be null | [The Root Schema](delete-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") | + +## typeName + +A name that is used to identify DataType & IndexType it should be the same as it was configured in the IndexMessage + + +`typeName` + +- is required +- Type: `string` ([The typeName Schema](delete-message-properties-the-typename-schema.md)) +- cannot be null +- defined in: [The Root Schema](delete-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") + +### typeName Type + +`string` ([The typeName Schema](delete-message-properties-the-typename-schema.md)) + +### typeName Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### typeName Examples + +```json +"GeoTiffType" +``` + +## indexName + +An index name. Index name would be inferred from indexOptions and indexType by default during the IndexMessage process. + + +`indexName` + +- is required +- Type: `string` ([The indexName Schema](delete-message-properties-the-indexname-schema.md)) +- cannot be null +- defined in: [The Root Schema](delete-message-properties-the-indexname-schema.md "\#/properties/indexName#/properties/indexName") + +### indexName Type + +`string` ([The indexName Schema](delete-message-properties-the-indexname-schema.md)) + +## geometry + +GeoJSON of a Geometry (Point, Polygon, MultiPolygon, etc) type. 
+ + +`geometry` + +- is optional +- Type: `object` ([The Geometry Schema](delete-message-properties-the-geometry-schema.md)) +- can be null +- defined in: [The Root Schema](delete-message-properties-the-geometry-schema.md "\#/properties/geometry#/properties/geometry") + +### geometry Type + +`object` ([The Geometry Schema](delete-message-properties-the-geometry-schema.md)) + +## time + +The time range definition in milliseconds with ISO strings. + + +`time` + +- is optional +- Type: `object` ([The Time Schema](delete-message-properties-the-time-schema.md)) +- can be null +- defined in: [The Root Schema](delete-message-properties-the-time-schema.md "\#/properties/time#/properties/time") + +### time Type + +`object` ([The Time Schema](delete-message-properties-the-time-schema.md)) + +## elevation + +The elevation range definition in ingested units. + + +`elevation` + +- is optional +- Type: `object` ([The elevation Schema](delete-message-properties-the-elevation-schema.md)) +- can be null +- defined in: [The Root Schema](delete-message-properties-the-elevation-schema.md "\#/properties/elevation#/properties/elevation") + +### elevation Type + +`object` ([The elevation Schema](delete-message-properties-the-elevation-schema.md)) + +## compareOp + +Type of query: intersection, inclusion, etc. 
+ + +`compareOp` + +- is optional +- Type: `string` ([The Compareop Schema](delete-message-properties-the-compareop-schema.md)) +- can be null +- defined in: [The Root Schema](delete-message-properties-the-compareop-schema.md "\#/properties/compareOp#/properties/compareOp") + +### compareOp Type + +`string` ([The Compareop Schema](delete-message-properties-the-compareop-schema.md)) + +### compareOp Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### compareOp Default Value + +The default value is: + +```json +"INTERSECTS" +``` + +### compareOp Examples + +```json +"CONTAINS" +``` + +```json +"OVERLAPS" +``` + +```json +"INTERSECTS" +``` + +```json +"TOUCHES" +``` + +```json +"WITHIN" +``` + +```json +"DISJOINT" +``` + +```json +"CROSSES" +``` + +```json +"EQUALS" +``` + +## namespace + +A namespace to refer during the connections establishment + + +`namespace` + +- is optional +- Type: `string` ([Storage (i.e. Cassandra) namespace](delete-message-properties-storage-ie-cassandra-namespace.md)) +- can be null +- defined in: [The Root Schema](delete-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") + +### namespace Type + +`string` ([Storage (i.e. 
Cassandra) namespace](delete-message-properties-storage-ie-cassandra-namespace.md)) + +### namespace Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` diff --git a/geowave/docs/schemas/index-message.md b/geowave/docs/schemas/index-message.md new file mode 100644 index 0000000000..1cc61c322c --- /dev/null +++ b/geowave/docs/schemas/index-message.md @@ -0,0 +1,116 @@ +## IndexMessage Type + +`object` ([IndexMessage](index-message.md)) + +# IndexMessage Properties + +| Property | Type | Required | Nullable | Defined by | +| :---------------------- | -------- | -------- | -------------- | :------------------------------------------------------------------------------------------------------------------------- | +| [indices](#indices) | `array` | Required | cannot be null | [IndexMessage](index-message-properties-the-indices-schema.md "\#/properties/indices#/properties/indices") | +| [typeName](#typeName) | `string` | Required | cannot be null | [IndexMessage](index-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") | +| [dataType](#dataType) | `string` | Required | cannot be null | [IndexMessage](index-message-properties-the-datatype-schema.md "\#/properties/dataType#/properties/dataType") | +| [namespace](#namespace) | `string` | Optional | can be null | [IndexMessage](index-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") | + +## indices + + + + +`indices` + +- is required +- Type: `object[]` ([The Items Schema](index-message-properties-the-indices-schema-the-items-schema.md)) +- cannot be null +- defined in: [IndexMessage](index-message-properties-the-indices-schema.md "\#/properties/indices#/properties/indices") + +### indices Type + +`object[]` ([The Items Schema](index-message-properties-the-indices-schema-the-items-schema.md)) + +## typeName + +An arbitrary name that would be used to identify DataType & IndexType 
and will be used in the ingest message + + +`typeName` + +- is required +- Type: `string` ([The typeName Schema](index-message-properties-the-typename-schema.md)) +- cannot be null +- defined in: [IndexMessage](index-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") + +### typeName Type + +`string` ([The typeName Schema](index-message-properties-the-typename-schema.md)) + +### typeName Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### typeName Examples + +```json +"GeoTiffType" +``` + +## dataType + +A dataType this index would be used for + + +`dataType` + +- is required +- Type: `string` ([The dataType Schema](index-message-properties-the-datatype-schema.md)) +- cannot be null +- defined in: [IndexMessage](index-message-properties-the-datatype-schema.md "\#/properties/dataType#/properties/dataType") + +### dataType Type + +`string` ([The dataType Schema](index-message-properties-the-datatype-schema.md)) + +### dataType Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### dataType Examples + +```json +"GEOTIFF" +``` + +## namespace + +A namespace to refer during the connections establishment + + +`namespace` + +- is optional +- Type: `string` ([Storage (i.e. Cassandra) namespace](index-message-properties-storage-ie-cassandra-namespace.md)) +- can be null +- defined in: [IndexMessage](index-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") + +### namespace Type + +`string` ([Storage (i.e. 
Cassandra) namespace](index-message-properties-storage-ie-cassandra-namespace.md)) + +### namespace Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` diff --git a/geowave/docs/schemas/ingest-message.md b/geowave/docs/schemas/ingest-message.md new file mode 100644 index 0000000000..cd5d53393d --- /dev/null +++ b/geowave/docs/schemas/ingest-message.md @@ -0,0 +1,149 @@ +## IngestMessage Type + +`object` ([IngestMessage](ingest-message.md)) + +# IngestMessage Properties + +| Property | Type | Required | Nullable | Defined by | +| :---------------------- | -------- | -------- | -------------- | :--------------------------------------------------------------------------------------------------------------------------- | +| [typeName](#typeName) | `string` | Required | cannot be null | [IngestMessage](ingest-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") | +| [dataType](#dataType) | `string` | Required | cannot be null | [IngestMessage](ingest-message-properties-the-datatype-schema.md "\#/properties/dataType#/properties/dataType") | +| [uri](#uri) | `string` | Required | cannot be null | [IngestMessage](ingest-message-properties-the-uri-schema.md "\#/properties/uri#/properties/uri") | +| [options](#options) | `object` | Optional | can be null | [IngestMessage](ingest-message-properties-the-ingest-options.md "\#/properties/options#/properties/options") | +| [namespace](#namespace) | `string` | Optional | can be null | [IngestMessage](ingest-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") | + +## typeName + +A name that is used to identify DataType & IndexType it should be the same as it was configured in the IndexMessage + + +`typeName` + +- is required +- Type: `string` ([The typeName Schema](ingest-message-properties-the-typename-schema.md)) +- cannot be null +- defined in: 
[IngestMessage](ingest-message-properties-the-typename-schema.md "\#/properties/typeName#/properties/typeName") + +### typeName Type + +`string` ([The typeName Schema](ingest-message-properties-the-typename-schema.md)) + +### typeName Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### typeName Examples + +```json +"GeoTiffType" +``` + +## dataType + +A dataType the ingest would happen for + + +`dataType` + +- is required +- Type: `string` ([The dataType Schema](ingest-message-properties-the-datatype-schema.md)) +- cannot be null +- defined in: [IngestMessage](ingest-message-properties-the-datatype-schema.md "\#/properties/dataType#/properties/dataType") + +### dataType Type + +`string` ([The dataType Schema](ingest-message-properties-the-datatype-schema.md)) + +### dataType Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### dataType Examples + +```json +"GEOTIFF" +``` + +## uri + +Path to a file + + +`uri` + +- is required +- Type: `string` ([The uri Schema](ingest-message-properties-the-uri-schema.md)) +- cannot be null +- defined in: [IngestMessage](ingest-message-properties-the-uri-schema.md "\#/properties/uri#/properties/uri") + +### uri Type + +`string` ([The uri Schema](ingest-message-properties-the-uri-schema.md)) + +### uri Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` + +[try pattern](https://regexr.com/?expression=%5E(.*)%24 "try regular expression with regexr.com") + +### uri Examples + +```json +"file://path/to/file" +``` + +## options + +Data type specific options + + +`options` + +- is optional +- Type: `object` ([The Ingest 
Options](ingest-message-properties-the-ingest-options.md)) +- can be null +- defined in: [IngestMessage](ingest-message-properties-the-ingest-options.md "\#/properties/options#/properties/options") + +### options Type + +`object` ([The Ingest Options](ingest-message-properties-the-ingest-options.md)) + +## namespace + +A namespace to refer during the connections establishment + + +`namespace` + +- is optional +- Type: `string` ([Storage (i.e. Cassandra) namespace](ingest-message-properties-storage-ie-cassandra-namespace.md)) +- can be null +- defined in: [IngestMessage](ingest-message-properties-storage-ie-cassandra-namespace.md "\#/properties/namespace#/properties/namespace") + +### namespace Type + +`string` ([Storage (i.e. Cassandra) namespace](ingest-message-properties-storage-ie-cassandra-namespace.md)) + +### namespace Constraints + +**pattern**: the string must match the following regular expression: + +```regexp +^(.*)$ +``` diff --git a/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.GeoTrellisDataAdapterProvider b/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.GeoTrellisDataAdapterProvider new file mode 100644 index 0000000000..3953a8f902 --- /dev/null +++ b/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.GeoTrellisDataAdapterProvider @@ -0,0 +1 @@ +geotrellis.geowave.adapter.geotiff.GeoTiffAdapterProvider diff --git a/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.HandlersRegistryProvider b/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.HandlersRegistryProvider new file mode 100644 index 0000000000..c8cf826f46 --- /dev/null +++ b/geowave/src/main/resources/META-INF/services/geotrellis.geowave.adapter.HandlersRegistryProvider @@ -0,0 +1 @@ +geotrellis.geowave.adapter.geotiff.GeoTiffHandlersRegistryProvider diff --git a/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi 
b/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi new file mode 100644 index 0000000000..13ceecdf30 --- /dev/null +++ b/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi @@ -0,0 +1 @@ +geotrellis.geowave.GeoTrellisPersistableRegistry diff --git a/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi b/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi new file mode 100644 index 0000000000..4aaf5c43e2 --- /dev/null +++ b/geowave/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi @@ -0,0 +1 @@ +geotrellis.geowave.index.SpatialTemporalElevationIndexTypeProvider diff --git a/geowave/src/main/resources/json/delete-message.schema.json b/geowave/src/main/resources/json/delete-message.schema.json new file mode 100644 index 0000000000..d7bb7cd743 --- /dev/null +++ b/geowave/src/main/resources/json/delete-message.schema.json @@ -0,0 +1,108 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "delete-message.schema.json", + "type": "object", + "title": "The Root Schema", + "required": [ + "typeName", + "indexName" + ], + "properties": { + "typeName": { + "$id": "#/properties/typeName", + "type": "string", + "title": "The typeName Schema", + "description" : "A name that is used to identify DataType & IndexType it should be the same as it was configured in the IndexMessage", + "examples": [ + "GeoTiffType" + ], + "pattern": "^(.*)$" + }, + "indexName": { + "$id": "#/properties/indexName", + "type": "string", + "title": "The indexName Schema", + "description" : "An index name. Index name would be inferred from indexOptions and indexType by default during the IndexMessage process." 
+ }, + "geometry": { + "$id": "#/properties/geometry", + "type": ["object", "null"], + "title": "The Geometry Schema", + "description" : "GeoJSON of a Geometry (Point, Polygon, MultiPolygon, etc) type.", + "required": [ + "type", + "coordinates" + ] + }, + "time": { + "$id": "#/properties/time", + "type": ["object", "null"], + "title": "The Time Schema", + "description" : "The time range definition in milliseconds with ISO strings.", + "required": [ + "min", + "max" + ], + "properties": { + "min": { + "$id": "#/properties/time/properties/min", + "type": "string", + "title": "The Min Schema" + }, + "max": { + "$id": "#/properties/time/properties/max", + "type": "string", + "title": "The Max Schema" + } + } + }, + "elevation": { + "$id": "#/properties/elevation", + "type": ["object", "null"], + "title": "The elevation Schema", + "description" : "The elevation range definition in ingested units.", + "required": [ + "min", + "max" + ], + "properties": { + "min": { + "$id": "#/properties/elevation/properties/min", + "type": "integer", + "title": "The Min Schema" + }, + "max": { + "$id": "#/properties/elevation/properties/max", + "type": "integer", + "title": "The Max Schema" + } + } + }, + "compareOp": { + "$id": "#/properties/compareOp", + "type": ["string", "null"], + "title": "The Compareop Schema", + "description" : "Type of query: intersection, inclusion, etc.", + "default": "INTERSECTS", + "examples": [ + "CONTAINS", + "OVERLAPS", + "INTERSECTS", + "TOUCHES", + "WITHIN", + "DISJOINT", + "CROSSES", + "EQUALS" + ], + "pattern": "^(.*)$" + }, + "namespace": { + "$id": "#/properties/namespace", + "type": ["string", "null"], + "title": "Storage (i.e. 
Cassandra) namespace", + "description" : "A namespace to refer during the connections establishment", + "pattern": "^(.*)$" + } + } +} diff --git a/geowave/src/main/resources/json/index-message.schema.json b/geowave/src/main/resources/json/index-message.schema.json new file mode 100644 index 0000000000..3687834a63 --- /dev/null +++ b/geowave/src/main/resources/json/index-message.schema.json @@ -0,0 +1,82 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "index-message.schema.json", + "type": "object", + "title": "IndexMessage", + "description" : "A description of the IndexMessage that is used to configure indices used in ingests.", + "required": [ + "indices", + "typeName", + "dataType" + ], + "properties": { + "indices": { + "$id": "#/properties/indices", + "type": "array", + "title": "The indices Schema", + "items": { + "$id": "#/properties/indices/items", + "type": "object", + "title": "The Items Schema", + "required": [ + "indexType", + "indexOptions" + ], + "properties": { + "indexName": { + "$id": "#/properties/indexName", + "type": ["string", "null"], + "title": "The indexName Schema", + "description" : "An index name. It is an optional name, index name would be inferred from indexOptions and indexType by default." + }, + "indexType": { + "$id": "#/properties/indexType", + "type": "string", + "title": "The indexType Schema", + "description" : "An index type", + "examples": [ + "spatial", + "spatial-temporal", + "spatial-temporal-depth" + ], + "pattern": "^(.*)$" + }, + "indexOptions": { + "$id": "#/properties/indexOptions", + "type": "object", + "title": "The indexOptions Schema", + "description" : "A dictionary with values that are dependent on the each IndexType." 
+ } + } + } + }, + "typeName": { + "$id": "#/properties/typeName", + "type": "string", + "title": "The typeName Schema", + "description" : "An arbitrary name that would be used to identify DataType & IndexType and will be used in the ingest message", + "examples": [ + "GeoTiffType" + ], + "pattern": "^(.*)$" + }, + "dataType": { + "$id": "#/properties/dataType", + "type": "string", + "title": "The dataType Schema", + "description" : "A dataType this index would be used for", + "examples": [ + "GEOTIFF" + ], + "pattern": "^(.*)$" + }, + "namespace": { + "$id": "#/properties/namespace", + "type": ["string", "null"], + "title": "Storage (i.e. Cassandra) namespace", + "description" : "A namespace to refer during the connections establishment", + "pattern": "^(.*)$" + } + } +} \ No newline at end of file diff --git a/geowave/src/main/resources/json/ingest-message.schema.json b/geowave/src/main/resources/json/ingest-message.schema.json new file mode 100644 index 0000000000..35bf7483f6 --- /dev/null +++ b/geowave/src/main/resources/json/ingest-message.schema.json @@ -0,0 +1,61 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "ingest-message.schema.json", + "type": "object", + "title": "IngestMessage", + "description" : "A description of the IngestMessage that is used to configure the ingest.", + "required": [ + "typeName", + "dataType", + "uri" + ], + "properties": { + "typeName": { + "$id": "#/properties/typeName", + "type": "string", + "title": "The typeName Schema", + "description" : "A name that is used to identify DataType & IndexType it should be the same as it was configured in the IndexMessage", + "examples": [ + "GeoTiffType" + ], + "pattern": "^(.*)$" + }, + "dataType": { + "$id": "#/properties/dataType", + "type": "string", + "title": "The dataType Schema", + "description" : "A dataType the ingest would happen for", + "examples": [ + "GEOTIFF" + ], + "pattern": "^(.*)$" + }, + "uri": { + "$id": "#/properties/uri", + 
"type": "string", + "title": "The uri Schema", + "description": "Path to a file", + "examples": [ + "file://path/to/file" + ], + "pattern": "^(.*)$" + }, + "options": { + "$id": "#/properties/options", + "type": ["object", "null"], + "title": "The Ingest Options", + "description": "Data type specific options", + "properties": {}, + "additionalProperties": true + }, + "namespace": { + "$id": "#/properties/namespace", + "type": ["string", "null"], + "title": "Storage (i.e. Cassandra) namespace", + "description" : "A namespace to refer during the connections establishment", + "pattern": "^(.*)$" + } + }, + "additionalProperties": false +} \ No newline at end of file diff --git a/geowave/src/main/resources/json/message.schema.json b/geowave/src/main/resources/json/message.schema.json new file mode 100644 index 0000000000..3802ad032c --- /dev/null +++ b/geowave/src/main/resources/json/message.schema.json @@ -0,0 +1,41 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "message.schema.json", + "type": "object", + "title": "Message", + "description" : "A description of the Message that is that is common", + "required": [ + "typeName", + "dataType" + ], + "properties": { + "typeName": { + "$id": "#/properties/typeName", + "type": "string", + "title": "The typeName Schema", + "description" : "An arbitrary name that would be used to identify DataType & IndexType and will be used in the ingest message", + "examples": [ + "GeoTiffType" + ], + "pattern": "^(.*)$" + }, + "dataType": { + "$id": "#/properties/dataType", + "type": "string", + "title": "The dataType Schema", + "description" : "A dataType this index would be used for", + "examples": [ + "GEOTIFF" + ], + "pattern": "^(.*)$" + }, + "namespace": { + "$id": "#/properties/namespace", + "type": ["string", "null"], + "title": "Storage (i.e. 
Cassandra) namespace", + "description" : "A namespace to refer during the connections establishment", + "pattern": "^(.*)$" + } + } +} \ No newline at end of file diff --git a/geowave/src/main/resources/reference.conf b/geowave/src/main/resources/reference.conf new file mode 100644 index 0000000000..e45286ef87 --- /dev/null +++ b/geowave/src/main/resources/reference.conf @@ -0,0 +1,12 @@ +geotrellis.raster.gdal { + acceptable-datasets = ["SOURCE", "WARPED"] + number-of-attempts = 1048576 +} + +geotrellis.geowave.connection.store { + data-store-type = "cassandra" + options = { + "contactPoints": "localhost", + "gwNamespace" : "geotrellis" + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/GeoTrellisPersistableRegistry.scala b/geowave/src/main/scala/geotrellis/geowave/GeoTrellisPersistableRegistry.scala new file mode 100644 index 0000000000..536b6d9aa7 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/GeoTrellisPersistableRegistry.scala @@ -0,0 +1,60 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave + +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter +import geotrellis.geowave.adapter.raster.MulitbandRasterAdapter +import geotrellis.geowave.index.dimension.{Elevation, ElevationDefinition} +import geotrellis.geowave.index.field.ElevationField +import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi +import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor + +/** + * To be sure that we didn't forget to register classes it is possible to use the following test class from the GeoWave tests: + * https://github.com/locationtech/geowave/blob/v1.0.0/core/store/src/test/java/org/locationtech/geowave/core/store/TestStorePersistableRegistry.java + * + * GeoWave has a complete test example of creating custom index with custom dimensions. + */ +class GeoTrellisPersistableRegistry extends PersistableRegistrySpi { + protected val INITIAL_ID_HANDLERS: Short = 4000 + protected val INITIAL_ID_ADAPTERS: Short = 5000 + protected val INITIAL_ID_DIMENSIONS: Short = 6000 + protected val INITIAL_ID_FIELDS: Short = 7000 + protected val INITIAL_ID_DEFINITION: Short = 8000 + + def getSupportedPersistables: Array[PersistableRegistrySpi.PersistableIdAndConstructor] = + Array( + /** Handlers */ + new PersistableIdAndConstructor(INITIAL_ID_HANDLERS, () => new GeoTiffAdapter.GeometryHandler()), + new PersistableIdAndConstructor((INITIAL_ID_HANDLERS + 1).toShort, () => new MulitbandRasterAdapter.GeometryHandler()), + new PersistableIdAndConstructor((INITIAL_ID_HANDLERS + 2).toShort, () => new GeoTiffAdapter.TimestampHandler()), + new PersistableIdAndConstructor((INITIAL_ID_HANDLERS + 3).toShort, () => new GeoTiffAdapter.ElevationHandler()), + + /** Adapters */ + new PersistableIdAndConstructor(INITIAL_ID_ADAPTERS, () => new GeoTiffAdapter()), + new PersistableIdAndConstructor((INITIAL_ID_ADAPTERS + 1).toShort, () => new MulitbandRasterAdapter()), + + /** Index dimensions */ + new 
PersistableIdAndConstructor(INITIAL_ID_DIMENSIONS, () => new Elevation()), + + /** Index dimension fields */ + new PersistableIdAndConstructor(INITIAL_ID_FIELDS, () => new ElevationField()), + + /** Index dimension definition */ + new PersistableIdAndConstructor(INITIAL_ID_DEFINITION, () => new ElevationDefinition(0, 320000)) + ) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/Implicits.scala new file mode 100644 index 0000000000..0b2f157f38 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/Implicits.scala @@ -0,0 +1,94 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave + +import java.util.function.Supplier + +import geotrellis.proj4.CRS +import geotrellis.raster.{CellGrid, Dimensions, Raster, RasterExtent} +import geotrellis.util.MethodExtensions +import geotrellis.vector.Extent +import org.opengis.referencing.crs.CoordinateReferenceSystem +import spire.math._ +import spire.syntax.convertableFrom._ + +trait Implicits { + /** An implicit that converts object constructor into [[Supplier]] */ + implicit def supplier[T](f: () => T): Supplier[T] = { () => f() } + + /** Converts GeoTrellis [[CRS]] object to GeoTools [[CoordinateReferenceSystem]] */ + implicit def geotrellisCRSToGeoToolsCoordinateReferenceSystem(crs: CRS): CoordinateReferenceSystem = + crs.epsgCode match { + case Some(code) => org.geotools.referencing.CRS.decode(s"EPSG:${code}") + case _ => crs.toWKT() match { + case Some(wkt) => org.geotools.referencing.CRS.parseWKT(wkt) + case _ => throw new Exception("Can't convert GeoTrellis CRS into GeoTools CRS") + } + } + + /** Method extensions to normalize [[geotrellis.proj4.LatLng]] [[Extent]] */ + implicit class extentLatLngMethodExtensions(val self: Extent) extends MethodExtensions[Extent] { + // Y // [-90; +90] + private def clampLat(lat: Double): Double = if (lat < -90) -90 else if (lat > 90) 90 else lat + // X // [-180; +180] + private def clampLng(lng: Double): Double = if (lng < -180) -180 else if (lng > 180) 180 else lng + + // https://github.com/locationtech/proj4j/blob/v1.1.0/src/main/java/org/locationtech/proj4j/proj/Projection.java#L795-L803 + private def normalizeLng(lng: Double): Double = { + var angle = lng + while (angle > 180) angle -= 360 + while (angle < -180) angle += 360 + angle + } + + /** WARN: can be used only with [[geotrellis.proj4.LatLng]] only */ + def clampLatLng: Extent = { + Extent( + xmin = clampLng(self.xmin), + xmax = clampLng(self.xmax), + ymin = clampLat(self.ymin), + ymax = clampLat(self.ymax) + ) + } + + /** WARN: can be used only with 
[[geotrellis.proj4.LatLng]] only */ + def normalizeLatLng: Extent = { + val List(xmin, xmax) = List(normalizeLng(self.xmin), normalizeLng(self.xmax)).sorted + + Extent( + xmin = xmin, + xmax = xmax, + ymin = self.ymin, + ymax = self.ymax + ) + } + } + + /** Method extensions to normalize [[geotrellis.proj4.LatLng]] [[RasterExtent]] */ + implicit class rasterExtentLatLngMethodExtensions(val self: RasterExtent) extends MethodExtensions[RasterExtent] { + /** WARN: can be used only with [[geotrellis.proj4.LatLng]] only */ + def normalizeLatLng: RasterExtent = RasterExtent(self.extent.normalizeLatLng, self.cols, self.rows) + } + + /** Method extensions to normalize [[geotrellis.proj4.LatLng]] [[Extent]] */ + implicit class rasterLatLngMethodExtensions[T <: CellGrid[Int]](val self: Raster[T]) extends MethodExtensions[Raster[T]] { + /** WARN: can be used only with [[geotrellis.proj4.LatLng]] only */ + def normalizeLatLngExtent: Raster[T] = Raster(self.tile, self.extent.normalizeLatLng) + } +} + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldReader.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldReader.scala new file mode 100644 index 0000000000..be0bf4e6c2 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldReader.scala @@ -0,0 +1,25 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import geotrellis.store.avro._ +import org.locationtech.geowave.core.store.data.field.FieldReader + +/** GeoWave FieldReader subclass that uses GeoTrellis AvroRecordCodec to produce binary encoding */ +class AvroFieldReader[T: AvroRecordCodec] extends FieldReader[T] { + def readField(bytes: Array[Byte]): T = AvroEncoder.fromBinary[T](bytes) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldWriter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldWriter.scala new file mode 100644 index 0000000000..e82ea162bc --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/AvroFieldWriter.scala @@ -0,0 +1,28 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import geotrellis.store.avro._ +import org.locationtech.geowave.core.store.data.field.FieldWriter + +/** GeoWave FieldWrter that uses GeoTrellis AvroRecordCodec to read values + * @note all fields are reported as visible + */ +class AvroFieldWriter[T: AvroRecordCodec] extends FieldWriter[T, T] { + override def getVisibility(rowValue: T, fieldName: String, fieldValue: T): Array[Byte] = Array() + def writeField(field: T): Array[Byte] = AvroEncoder.toBinary(field) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/DataTypeRegistry.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/DataTypeRegistry.scala new file mode 100644 index 0000000000..24b51e5dbf --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/DataTypeRegistry.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import java.util.ServiceLoader + +object DataTypeRegistry { + val supportedTypes: List[DataType] = { + import scala.collection.JavaConverters._ + + ServiceLoader + .load(classOf[GeoTrellisDataAdapterProvider]) + .iterator() + .asScala + .flatMap(_.supportedDataTypes) + .toList + } + + def exists(dataType: DataType): Boolean = supportedTypes.contains(dataType) + def find(dataType: String): Option[DataType] = supportedTypes.find(_ == dataType) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/ElevationFieldHandler.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/ElevationFieldHandler.scala new file mode 100644 index 0000000000..644d964c47 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/ElevationFieldHandler.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.index.dimension.Elevation + +trait ElevationFieldHandler[T] extends IndexFieldHandler[T] { + protected var fieldName: IndexFieldName = Elevation.DEFAULT_FIELD_NAME.indexFieldName +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapter.scala new file mode 100644 index 0000000000..bd72b4541d --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapter.scala @@ -0,0 +1,221 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import java.nio.ByteBuffer +import java.util +import java.util.ServiceLoader + +import cats.data.Validated +import geotrellis.geowave.dsl.syntax._ +import org.locationtech.geowave.core.index.persist.{Persistable, PersistenceUtils} +import org.locationtech.geowave.core.index.{ByteArrayUtils, StringUtils, VarintUtils} +import org.locationtech.geowave.core.store.adapter.{AdapterPersistenceEncoding, IndexedAdapterPersistenceEncoding} +import org.locationtech.geowave.core.store.api.{DataTypeAdapter, Index} +import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset +import org.locationtech.geowave.core.store.data.field.{FieldReader, FieldWriter} +import org.locationtech.geowave.core.store.index.{CommonIndexModel, CommonIndexValue} +import org.slf4j.LoggerFactory + +import scala.collection.JavaConverters._ + +/** GeoTrellisDataAdapter abstracts over storing raster data indexed using GeoWave index. + * + * DataAdapter instances specifies: + * - How to extract index values from the input entry + * - How to read and write the entry for a GeoWave table + * - How the data will be stored in native (Cassandra) fields + * - What indexes are to be used when storing the data + * + * Each Adapter has a name, that should be unique per Cassandra namespace. + * Adapter may have a one-to-many relationship with indexes. + * - Relationships are persisted in `aim_geowave_metadata` table. + * + * DataApter and Index instances are associated together through GeoWave DataStore. + * + * {{{ + * val dataStore: DataStore = ??? + * // call to addType persists the adapter, index instances and their and relationships + * dataStore.addType(adapter, index1, index2) + * val writer = dataStore.createWriter[GeoTiff[MultibandTile]](adapter.getTypeName) + * writer.write(entry) + * }}} + * + * This will result with each entry being written to multiple tables each associated with a given index. 
+ *
+ * In order to support this persistence mechanism each adapter must be registered with the adapter SPI in
+ * [[geotrellis.geowave.GeoTrellisPersistableRegistry]].
+ *
+ * @note All adapters should have an empty constructor to use Serialization mechanisms.
+ *
+ * @note Adapter needs to be registered with the store only once.
+ *       Once the adapter is used to write a record once, it is persisted to the store.
+ *       All further reads and writes will read that adapter from the store.
+ *
+ * @see https://locationtech.github.io/geowave/devguide.html#adapters
+ */
+abstract class GeoTrellisDataAdapter[T <: AnyRef](
+  /** The type name which serves as a unique identifier for this adapter. This also must be unique within a datastore. */
+  private var typeName: TypeName,
+  private var fieldHandlers: List[IndexFieldHandler[T]]
+) extends DataTypeAdapter[T] {
+  protected val logger = LoggerFactory.getLogger(this.getClass())
+  protected val DATA_FIELD_ID: String
+
+  /**
+    * Copied from the RasterDataAdapter, we can make it configurable though
+    * For SimpleFeatures it is a FeatureId, for a model this can be a ModelId
+    */
+  override def getDataId(entry: T): Array[Byte]
+
+  /** Gets a reader to read the value from the row */
+  def getReader(fieldName: String): FieldReader[AnyRef]
+
+  /** Gets a writer to write the value into the row */
+  def getWriter(fieldName: String): FieldWriter[T, AnyRef]
+
+
+  // these are lazy because of initialization order
+  lazy val fieldNames = fieldHandlers.map(_.getFieldName) :+ DATA_FIELD_ID
+  lazy val fieldNameMatchingFieldHandlers = fieldHandlers.map { handler =>
+    handler.getFieldName -> handler
+  }.toMap[String, IndexFieldHandler[T]]
+  lazy val fieldNameToIndex = fieldNames.zipWithIndex.toMap
+  lazy val indexToFieldName = fieldNameToIndex.map { case (k, v) => (v, k) }
+
+  override def getTypeName: String = {
+    logger.trace(s"getTypeName: ${typeName}")
+    typeName
+  }
+
+  /**
+    * Field handlers helpers.
+ * Helper is a function that knows how to extract index value from the entry. + */ + def getFieldHandlers = { + logger.trace(s"getFieldHandlers: $fieldHandlers") + fieldHandlers + } + + /** + * Validates that this adapter can provide the index with all fields required by its dimensions + */ + def validateIndexCompatability(index: Index): Validated[String, Index] = { + val availableFields = fieldHandlers.map(_.getFieldName.value).toSet + val requiredFields = index.getIndexModel.getDimensions.map(_.getFieldName).toSet + val missingFields = requiredFields.diff(availableFields) + + if (missingFields.isEmpty) Validated.valid(index) + else Validated.invalid(s"${index.getName} index requires ${missingFields} fields, ${availableFields} are available") + } + + def getFieldHandler(name: String): Option[IndexFieldHandler[T]] = { + logger.trace(s"getFieldHandler: ${name}") + fieldNameMatchingFieldHandlers.get(name) + } + + /** The position in a row is indexed, by the field name we can get the offset of the recorded field in a row */ + def getPositionOfOrderedField(commonIndexModel: CommonIndexModel, fieldName: String): Int = { + logger.trace(s"getPositionofOrderedField($fieldName)") + fieldNameToIndex(fieldName) + } + + /** The position in a row is indexed, by the field offset we can get the name of the recorded field in a row */ + def getFieldNameForPosition(commonIndexModel: CommonIndexModel, index: Int): String = { + logger.trace(s"getFieldNameForPosition($index)") + indexToFieldName(index) + } + + /** There is no need in using a handlers mechanism, since this is the only "DATA" field. 
*/ + override def decode(data: IndexedAdapterPersistenceEncoding, index: Index): T = { + logger.trace(s"decode($index)") + data.getAdapterExtendedData.getValue(DATA_FIELD_ID).asInstanceOf[T] + } + + override def encode(entry: T, indexModel: CommonIndexModel): AdapterPersistenceEncoding = { + logger.trace(s"encode($entry)") + /** Each handler is a function that knows how to extract value from the entry that would be used to build index */ + val indexData = new MultiFieldPersistentDataset[CommonIndexValue]() + indexModel.getDimensions.foreach { dimension => + getFieldHandler(dimension.getFieldName).map { handler => + val value = handler.toIndexValue(entry) + indexData.addValue(dimension.getFieldName, value) + } + } + + /** Since we have a single value field, there are no needs in using handlers mechanisms to extract data from the entry */ + val extendedData = new MultiFieldPersistentDataset[AnyRef]() + extendedData.addValue(DATA_FIELD_ID, entry) + + new AdapterPersistenceEncoding(getDataId(entry), indexData, extendedData) + } + + /** + * Serializes adapter since it stores it in the table and uses this data on read + * to construct the adapter from the persisted parameters. 
+ */ + def toBinary: Array[Byte] = { + val persistables = new util.LinkedHashSet[Persistable]() + fieldHandlers.map { indexHandler => persistables.add(indexHandler.asInstanceOf[Persistable]) } + + val typeNameBytes = StringUtils.stringToBinary(typeName) + val persistablesBytes = PersistenceUtils.toBinary(persistables) + + val buf = + ByteBuffer.allocate( + typeNameBytes.length + + persistablesBytes.length + + VarintUtils.unsignedIntByteLength(typeNameBytes.length) + + VarintUtils.unsignedIntByteLength(persistablesBytes.length)) + + VarintUtils.writeUnsignedInt(typeNameBytes.length, buf) + VarintUtils.writeUnsignedInt(persistablesBytes.length, buf) + + buf.put(typeNameBytes) + buf.put(persistablesBytes) + logger.trace(s"toBinary $this: ${buf.position} bytes") + buf.array + } + + def fromBinary(bytes: Array[Byte]): Unit = { + logger.trace(s"fromBinary: ${bytes.length} bytes") + val buf = ByteBuffer.wrap(bytes) + + val typeNameBytesLength = VarintUtils.readUnsignedInt(buf) + val persistablesBytesLength = VarintUtils.readUnsignedInt(buf) + + val typeName = StringUtils.stringFromBinary(ByteArrayUtils.safeRead(buf, typeNameBytesLength)) + val persistables = PersistenceUtils.fromBinaryAsList(ByteArrayUtils.safeRead(buf, persistablesBytesLength)).asScala.toList + + this.typeName = typeName.typeName + fieldHandlers = persistables.map(_.asInstanceOf[IndexFieldHandler[T]]) + } +} + +object GeoTrellisDataAdapter { + def load(dataType: DataType, typeName: TypeName): GeoTrellisDataAdapter[_] = { + import scala.collection.JavaConverters._ + + ServiceLoader + .load(classOf[GeoTrellisDataAdapterProvider]) + .iterator() + .asScala + .find(_.canProcess(dataType)) + .getOrElse(throw new RuntimeException(s"Unable to find DataAdapter for type $dataType")) + .adapter(typeName) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterProvider.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterProvider.scala new file mode 100644 
index 0000000000..2c72781f03 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterProvider.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter + + +trait GeoTrellisDataAdapterProvider { + def supportedDataTypes: List[DataType] + def canProcess(dataType: DataType): Boolean = supportedDataTypes.contains(dataType) + def adapter(typeName: TypeName): GeoTrellisDataAdapter[_] +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/GeometryFieldHandler.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/GeometryFieldHandler.scala new file mode 100644 index 0000000000..a88d072598 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/GeometryFieldHandler.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import geotrellis.geowave.dsl.syntax._ +import org.locationtech.geowave.core.geotime.store.dimension.GeometryWrapper + +trait GeometryFieldHandler[T] extends IndexFieldHandler[T] { + protected var fieldName: IndexFieldName = GeometryWrapper.DEFAULT_GEOMETRY_FIELD_NAME.indexFieldName +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/IndexFieldHandler.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/IndexFieldHandler.scala new file mode 100644 index 0000000000..69e360c2dc --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/IndexFieldHandler.scala @@ -0,0 +1,45 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter + +import geotrellis.geowave.dsl.syntax._ +import org.locationtech.geowave.core.index.StringUtils +import org.locationtech.geowave.core.index.persist.Persistable +import org.locationtech.geowave.core.store.index.CommonIndexValue + +/** This class is used by the DataAdapter to translate between native values and persistence + * encoded values. The basic implementation of this will perform type matching on the index field type. + * + * The field name is mutable so it can be made to match field names required by a given index. 
+ */ +trait IndexFieldHandler[RowType] extends Persistable { + protected var fieldName: IndexFieldName + + def getFieldName: IndexFieldName = fieldName + + def setFieldName(name: IndexFieldName): Unit = this.fieldName = name + + def toIndexValue(row: RowType): CommonIndexValue + + override def toBinary: Array[Byte] = { + StringUtils.stringToBinary(fieldName) + } + + override def fromBinary(bytes: Array[Byte]): Unit = { + fieldName = StringUtils.stringFromBinary(bytes).indexFieldName + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/TimestampFieldHandler.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/TimestampFieldHandler.scala new file mode 100644 index 0000000000..8a9bf8b76d --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/TimestampFieldHandler.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package geotrellis.geowave.adapter
+
+import geotrellis.geowave.dsl.syntax._
+
+trait TimestampFieldHandler[T] extends IndexFieldHandler[T] {
+  /** @see org.locationtech.geowave.core.geotime.store.dimension.TimeField */
+  protected var fieldName: IndexFieldName = "default_time_dimension".indexFieldName
+}
diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapter.scala
new file mode 100644
index 0000000000..2628d0cfec
--- /dev/null
+++ b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapter.scala
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2020 Azavea
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package geotrellis.geowave.adapter.geotiff
+
+import java.time.{ZoneOffset, ZonedDateTime}
+import java.time.format.DateTimeFormatter
+
+import geotrellis.geowave.GeoTrellisPersistableRegistry
+import geotrellis.geowave.adapter._
+import geotrellis.geowave.dsl.syntax._
+import geotrellis.geowave.index.dimension.Elevation
+import geotrellis.raster.MultibandTile
+import geotrellis.raster.io.geotiff.GeoTiff
+import org.locationtech.geowave.core.geotime.store.dimension.{GeometryWrapper, Time}
+import org.locationtech.geowave.core.store.data.field.{FieldReader, FieldWriter}
+
+/**
+ * All adapters should have an empty constructor to use Serialization mechanisms.
+ * Each new Adapter should also be registered in the [[GeoTrellisPersistableRegistry]]. + */ +class GeoTiffAdapter( + private var typeName: TypeName = "".typeName, + private var fieldHandlers: List[IndexFieldHandler[GeoTiff[MultibandTile]]] = List( + new GeoTiffAdapter.GeometryHandler, + new GeoTiffAdapter.TimestampHandler, + new GeoTiffAdapter.ElevationHandler) +) extends GeoTrellisDataAdapter[GeoTiff[MultibandTile]](typeName, fieldHandlers) { + protected val DATA_FIELD_ID = "GT_TIFF" + + /** + * For SimpleFeatures it is a FeatureId, for model this can be a ModelId, smth to help filter the results + * In case the partitionKey would mach across several entries due to SFC limitations + */ + def getDataId(t: GeoTiff[MultibandTile]): Array[Byte] = Array(0) + + /** Gets a reader to read the value from the row */ + def getReader(fieldName: String): FieldReader[AnyRef] = + if(fieldName == DATA_FIELD_ID) new GeoTiffFieldReader() else null + + /** Gets a writer to write the value into the row */ + def getWriter(fieldName: String): FieldWriter[GeoTiff[MultibandTile], AnyRef] = + new GeoTiffFieldWriter() +} + +object GeoTiffAdapter { + + /** A function that extracts [[geotrellis.vector.Extent]] from the [[GeoTiff[MultibandTile]]] */ + class GeometryHandler extends GeometryFieldHandler[GeoTiff[MultibandTile]] { + def toIndexValue(tiff: GeoTiff[MultibandTile]): GeometryWrapper = + new GeometryWrapper(tiff.extent.toPolygon(), Array()) + } + + + /** A function that extracts [[Elevation]] from the [[GeoTiff[MultibandTile]]] */ + class ElevationHandler extends ElevationFieldHandler[GeoTiff[MultibandTile]] { + def toIndexValue(tiff: GeoTiff[MultibandTile]): Elevation = + Elevation(java.lang.Double.parseDouble(tiff.tags.headTags("ELEVATION"))) + } + + + /** A function that extracts [[Time]] from the [[GeoTiff[MultibandTile]]] */ + class TimestampHandler extends TimestampFieldHandler[GeoTiff[MultibandTile]] { + def toIndexValue(tiff: GeoTiff[MultibandTile]): Time = { + val 
dateString = tiff.tags.headTags(GEOTIFF_TIME_TAG_DEFAULT) + val dateTime = ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)) + new Time.Timestamp(dateTime.toInstant.toEpochMilli, Array()) + } + } + + /** Corresponds to Time.DEFAULT_FIELD_ID */ + val GEOTIFF_TIME_TAG_DEFAULT = "TIFFTAG_DATETIME" + val GEOTIFF_TIME_FORMAT_DEFAULT = "yyyy:MM:dd HH:mm:ss" + val GEOTIFF_TIME_FORMAT = "GEOTIFF_TIME_FORMAT" + val GEOTIFF_TIME_FORMATTER_DEFAULT = timeFormatter(GEOTIFF_TIME_FORMAT_DEFAULT) + + def timeFormatter(pattern: String): DateTimeFormatter = DateTimeFormatter.ofPattern(pattern).withZone(ZoneOffset.UTC) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterProvider.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterProvider.scala new file mode 100644 index 0000000000..02ae77c826 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterProvider.scala @@ -0,0 +1,25 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.geowave.adapter._ + +class GeoTiffAdapterProvider extends GeoTrellisDataAdapterProvider { + def supportedDataTypes: List[DataType] = List(DataTypeGeoTiff) + def adapter(typeName: TypeName): GeoTrellisDataAdapter[_] = + new GeoTiffAdapter(typeName) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldReader.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldReader.scala new file mode 100644 index 0000000000..b4b044ffbc --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldReader.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.GeoTiff +import geotrellis.raster.io.geotiff.reader.GeoTiffReader +import org.locationtech.geowave.core.store.data.field.FieldReader + +class GeoTiffFieldReader extends FieldReader[GeoTiff[MultibandTile]] { + def readField(bytes: Array[Byte]): GeoTiff[MultibandTile] = GeoTiffReader.readMultiband(bytes) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldWriter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldWriter.scala new file mode 100644 index 0000000000..2f7bcc92f3 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/GeoTiffFieldWriter.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.GeoTiff +import org.locationtech.geowave.core.store.data.field.FieldWriter + +class GeoTiffFieldWriter extends FieldWriter[GeoTiff[MultibandTile], GeoTiff[MultibandTile]] { + override def getVisibility(rowValue: GeoTiff[MultibandTile], fieldName: String, fieldValue: GeoTiff[MultibandTile]): Array[Byte] = Array() + def writeField(field: GeoTiff[MultibandTile]): Array[Byte] = field.toByteArray +} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/package.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/package.scala similarity index 52% rename from geowave/src/main/scala/geotrellis/spark/store/geowave/package.scala rename to geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/package.scala index 15ed032072..864ea1703b 100644 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/package.scala +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/geotiff/package.scala @@ -1,5 +1,5 @@ /* - * Copyright 2019 Azavea + * Copyright 2020 Azavea * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,15 +14,10 @@ * limitations under the License. 
*/ -package geotrellis.spark.store +package geotrellis.geowave.adapter -import org.locationtech.jts.{geom => jts} -import org.locationtech.jts.io.WKTWriter -import com.vividsolutions.jts.io.{WKTReader => OLDWKTReader} -import com.vividsolutions.jts.{geom => jtsOld} +import geotrellis.geowave.dsl.syntax._ -package object geowave { - /** An ugly conversion function from the new jts.Geometry type into the old GeoWave compatible Geometry type */ - implicit def geometryConversion(geom: jts.Geometry): jtsOld.Geometry = - new OLDWKTReader().read(new WKTWriter().write(geom)) +package object geotiff { + val DataTypeGeoTiff = "GeoTiff".dataType } diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/package.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/package.scala new file mode 100644 index 0000000000..b4659d63ff --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/package.scala @@ -0,0 +1,48 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave + +import io.estatico.newtype.macros.newsubtype +import org.locationtech.geowave.core.store.data.field.{FieldReader, FieldWriter} + +package object adapter { + + /** Input data type that can be used to find a [[GeoTrellisDataAdapter]] instance. + * DataType describes both the format and semantics of the data + * + * DataType describes how to get the reader for the data we need to ingest. 
+ * It does not describe how it will be stored and indexed, that is controled by the DataAdapter. + */ + @newsubtype case class DataType(value: String) + + /** Name for [[GeoTrellisDataAdapter]] instance. + * - This name is provided during index configuration + * - This name must be given when ingesting a file + */ + @newsubtype case class TypeName(value: String) + + /** Name of a field on which a feature is indexed. + * + * List of field names per DataType are defined by which [[NamedIndexFieldHandler]]s are provided to [[GeoTrellisDataAdapter]]. + * Thus this name only has any meaning in relation to specific [[GeoTrellisDataAdapter]] instance. + * Index field values are stored in Common Index Data and are used for query refinement. + */ + @newsubtype case class IndexFieldName(value: String) + + implicit def upcastFieldReader[T](fieldReader: FieldReader[T]): FieldReader[AnyRef] = fieldReader.asInstanceOf[FieldReader[AnyRef]] + implicit def upcastFieldWriter[T](fieldWriter: FieldWriter[T, T]): FieldWriter[T, AnyRef] = fieldWriter.asInstanceOf[FieldWriter[T, AnyRef]] +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MulitbandRasterAdapter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MulitbandRasterAdapter.scala new file mode 100644 index 0000000000..555df7ebc4 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MulitbandRasterAdapter.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.raster + +import geotrellis.geowave.GeoTrellisPersistableRegistry +import geotrellis.geowave.adapter._ +import geotrellis.geowave.dsl.syntax._ +import geotrellis.raster.{MultibandTile, Raster} +import org.locationtech.geowave.core.geotime.store.dimension.GeometryWrapper +import org.locationtech.geowave.core.store.data.field.{FieldReader, FieldWriter} + +/** + * All adapters should have an empty constructor to use Serialization mechanisms. + * Each new Adapter should also be registered in the [[GeoTrellisPersistableRegistry]]. + */ +class MulitbandRasterAdapter( + private var typeName: TypeName = "".typeName, + private var fieldHandlers: List[IndexFieldHandler[Raster[MultibandTile]]] = List( + new MulitbandRasterAdapter.GeometryHandler + ) +) extends GeoTrellisDataAdapter[Raster[MultibandTile]](typeName, fieldHandlers) { + protected val DATA_FIELD_ID = "GT_RASTER" + + /** + * For SimpleFeatures it is a FeatureId, for model this can be a ModelId, smth to help filter the results + * In case the partitionKey would mach across several entries due to SFC limitations + */ + def getDataId(t: Raster[MultibandTile]): Array[Byte] = Array(0) + + /** Gets a reader to read the value from the row */ + def getReader(fieldName: String): FieldReader[AnyRef] = if(fieldName == DATA_FIELD_ID) new MultibandRasterReader() else null + + /** Gets a writer to write the value into the row */ + def getWriter(fieldName: String): FieldWriter[Raster[MultibandTile], AnyRef] = new MultibandRasterWriter() +} + +object MulitbandRasterAdapter { + /** A function that extracts [[geotrellis.vector.Extent]] from the [[Raster[MultibandTile]]] */ + class GeometryHandler extends GeometryFieldHandler[Raster[MultibandTile]] { + def toIndexValue(raster: Raster[MultibandTile]): GeometryWrapper = + new GeometryWrapper(raster.extent.toPolygon(), Array()) + } +} diff 
--git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterReader.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterReader.scala new file mode 100644 index 0000000000..bf4f574f25 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterReader.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.raster + +import geotrellis.geowave.adapter.raster.avro._ +import geotrellis.raster.{MultibandTile, Raster} +import geotrellis.store.avro.AvroEncoder +import org.locationtech.geowave.core.store.data.field.FieldReader + +class MultibandRasterReader extends FieldReader[Raster[MultibandTile]] { + def readField(bytes: Array[Byte]): Raster[MultibandTile] = AvroEncoder.fromBinary[Raster[MultibandTile]](bytes) +} + diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterWriter.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterWriter.scala new file mode 100644 index 0000000000..b272372d7c --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/MultibandRasterWriter.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.raster + +import geotrellis.geowave.adapter.raster.avro._ +import geotrellis.raster.{MultibandTile, Raster} +import geotrellis.store.avro._ +import org.locationtech.geowave.core.store.data.field.FieldWriter + +class MultibandRasterWriter extends FieldWriter[Raster[MultibandTile], Raster[MultibandTile]] { + override def getVisibility(rowValue: Raster[MultibandTile], fieldName: String, fieldValue: Raster[MultibandTile]): Array[Byte] = Array() + def writeField(field: Raster[MultibandTile]): Array[Byte] = AvroEncoder.toBinary(field.mapTile(_.toArrayTile(): MultibandTile)) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/Implicits.scala new file mode 100644 index 0000000000..0fb8ec8627 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/Implicits.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.raster.avro + +trait Implicits extends RasterCodec + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/RasterCodec.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/RasterCodec.scala new file mode 100644 index 0000000000..e4abe4147b --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/RasterCodec.scala @@ -0,0 +1,47 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.raster.avro + +import geotrellis.raster.{CellGrid, Raster} +import geotrellis.store.avro._ +import geotrellis.vector.Extent + +import org.apache.avro.SchemaBuilder +import org.apache.avro.generic.GenericRecord + +trait RasterCodec { + implicit def rasterCodec[T <: CellGrid[Int]: AvroRecordCodec]: AvroRecordCodec[Raster[T]] = new AvroRecordCodec[Raster[T]] { + def schema = SchemaBuilder + .record("Raster").namespace("geotrellis.raster") + .fields() + .name("tile").`type`(AvroRecordCodec[T].schema).noDefault + .name("extent").`type`(AvroRecordCodec[Extent].schema).noDefault + .endRecord() + + def encode(raster: Raster[T], rec: GenericRecord): Unit = { + rec.put("tile", AvroRecordCodec[T].encode(raster.tile)) + rec.put("extent", AvroRecordCodec[Extent].encode(raster.extent)) + } + + def decode(rec: GenericRecord): Raster[T] = { + val tile = AvroRecordCodec[T].decode(rec[GenericRecord]("tile")) + val extent = AvroRecordCodec[Extent].decode(rec[GenericRecord]("extent")) + Raster(tile, extent) + } + } + +} diff --git a/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/package.scala b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/package.scala new file mode 100644 index 0000000000..889f5fd9e5 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/adapter/raster/avro/package.scala @@ -0,0 +1,19 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.raster + +package object avro extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/api/SQueryBuilder.scala b/geowave/src/main/scala/geotrellis/geowave/api/SQueryBuilder.scala new file mode 100644 index 0000000000..bc66ce7ffe --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/api/SQueryBuilder.scala @@ -0,0 +1,23 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.api + +import org.locationtech.geowave.core.store.api.QueryBuilder + +object SQueryBuilder { + def newBuilder[T]: SQueryBuilder[T] = QueryBuilder.newBuilder[T]().asInstanceOf[SQueryBuilder[T]] +} diff --git a/geowave/src/main/scala/geotrellis/geowave/api/package.scala b/geowave/src/main/scala/geotrellis/geowave/api/package.scala new file mode 100644 index 0000000000..a25b06c986 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/api/package.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave + +import cats.Semigroup +import org.locationtech.geowave.core.store.api.QueryBuilder +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass + +package object api { + type SQueryBuilder[T] = QueryBuilder[T, R] forSome { type R <: QueryBuilder[T, R] } + + implicit val constraintsByClassSemigroup: Semigroup[ConstraintsByClass] = { _ merge _ } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/conf/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/conf/Implicits.scala new file mode 100644 index 0000000000..0415d787cd --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/conf/Implicits.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.conf + +import io.estatico.newtype.Coercible +import pureconfig.ConfigReader + +trait Implicits { + /** Derive pureconfig codecs for newtypes. 
*/ + implicit def coercibleConfigReader[R, N](implicit ev: Coercible[ConfigReader[R], ConfigReader[N]], R: ConfigReader[R]): ConfigReader[N] = ev(R) +} + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/conf/StoreConfiguration.scala b/geowave/src/main/scala/geotrellis/geowave/conf/StoreConfiguration.scala new file mode 100644 index 0000000000..f53f976d1f --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/conf/StoreConfiguration.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.conf + +import cats.effect.Sync +import cats.syntax.option._ +import geotrellis.geowave.dsl.DataStoreType +import org.locationtech.geowave.core.store.api.DataStore +import pureconfig._ +import pureconfig.generic.auto._ + +case class StoreConfiguration( + dataStoreType: DataStoreType, + options: Option[Map[String, String]] = None +) { + def getDataStore: DataStore = dataStoreType.getDataStore(options.getOrElse(Map())) + def getDataStore(namespace: String): DataStore = getDataStore(namespace.some) + def getDataStore(namespace: Option[String]): DataStore = { + val opts = options.getOrElse(Map()) + dataStoreType.getDataStore(namespace.fold(opts)(ns => opts + ("gwNamespace" -> ns))) + } +} + +object StoreConfiguration { + lazy val load: StoreConfiguration = ConfigSource.default.at("geotrellis.geowave.connection.store").loadOrThrow[StoreConfiguration] + + implicit def StoreConfigurationToClass(obj: StoreConfiguration.type): StoreConfiguration = load +} diff --git a/geowave/src/main/scala/geotrellis/geowave/conf/package.scala b/geowave/src/main/scala/geotrellis/geowave/conf/package.scala new file mode 100644 index 0000000000..cb58c2bead --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/conf/package.scala @@ -0,0 +1,19 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave + +package object conf extends conf.Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/DataAdapterParameters.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/DataAdapterParameters.scala new file mode 100644 index 0000000000..a0ffca94c8 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/DataAdapterParameters.scala @@ -0,0 +1,51 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import cats.syntax.either._ +import geotrellis.geowave.adapter.{DataType, TypeName} +import geotrellis.geowave.dsl.json.{JsonValidator, _} +import io.circe.generic.extras.ConfiguredJsonCodec + +trait DataAdapterParameters { + /** Adapter name with configured indexes + * This is going to be used as a name for [[geotrellis.geowave.adapter.GeoTrellisDataAdapter]] + */ + def typeName: TypeName + + /** Name of data type supported by the Adapter + * This is used to produce an adapter instance that will be registered with given index + */ + def dataType: DataType + + /** Storage (i.e. 
Cassandra) namespace to be used for connection + * Follow up ingest message should be using consistent namespace values + */ + def namespace: Option[String] +} + +object DataAdapterParameters { + @ConfiguredJsonCodec + case class Header(typeName: TypeName, dataType: DataType, namespace: Option[String]) extends DataAdapterParameters + + implicit val ingestParametersValidator: JsonValidator[DataAdapterParameters.Header] = { json => + JsonValidator + .validateMessageHeader(json) + .toEither + .flatMap(_ => json.as[DataAdapterParameters.Header].leftMap(JsonValidatorErrors(_))) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/DataTypeReader.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/DataTypeReader.scala new file mode 100644 index 0000000000..6129f7d26f --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/DataTypeReader.scala @@ -0,0 +1,33 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import java.net.URI + +import cats.~> + +/** DataTypeReader abstracts over the ability to read and split a data format during ingest. + * The type encoding is controlled by the R type parameter.
+ * It is therefore possible to read GeoTiff multiple ways by varying O and R + */ +trait DataTypeReader[F[_], O, R] { self => + def read(uri: URI, options: Option[O]): F[Iterator[R]] + + def mapK[G[_]](f: F ~> G): DataTypeReader[G, O, R] = new DataTypeReader[G, O, R] { + def read(uri: URI, options: Option[O]): G[Iterator[R]] = f(self.read(uri, options)) + } +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/DeleteParameters.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/DeleteParameters.scala new file mode 100644 index 0000000000..e12056059b --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/DeleteParameters.scala @@ -0,0 +1,76 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package geotrellis.geowave.dsl + +import cats.instances.option._ +import cats.syntax.either._ +import cats.syntax.semigroup._ +import geotrellis.geowave.adapter.TypeName +import geotrellis.geowave.api._ +import geotrellis.geowave.dsl.json._ +import geotrellis.geowave.index.dimension.ElevationDefinition +import io.circe.generic.extras.ConfiguredJsonCodec +import geotrellis.vector._ +import org.locationtech.geowave.core.geotime.index.dimension.{SimpleTimeDefinition, TimeDefinition} +import org.locationtech.geowave.core.geotime.store.dimension.Time.TimeRange +import org.locationtech.geowave.core.geotime.util.GeometryUtils +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.{ConstraintData, ConstraintSet, ConstraintsByClass} +import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter + +@ConfiguredJsonCodec +case class DeleteParameters( + typeName: TypeName, + indexName: String, + geometry: Option[Geometry] = None, + namespace: Option[String] = None, + time: Option[TimeRange] = None, + elevation: Option[NumericRange] = None, + compareOp: BasicQueryFilter.BasicQueryCompareOperation = BasicQueryFilter.BasicQueryCompareOperation.INTERSECTS +) extends QueryConfiguration { + def constraints: Option[ConstraintsByClass] = { + val gc = geometry.map(GeometryUtils.basicConstraintsFromGeometry) + val tc = time.map { timeRange => + new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(timeRange.toNumericData(), false), + classOf[TimeDefinition], + classOf[SimpleTimeDefinition] + ) + ) + } + val dc = elevation.map { elevationRange => + new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(elevationRange, false), + classOf[ElevationDefinition] + ) + ) + } + + (gc |+| tc) |+| dc + } +} + +object DeleteParameters { + implicit val deleteParametersValidator: JsonValidator[DeleteParameters] = { json => + JsonValidator + 
.validateDeleteParameters(json) + .toEither + .flatMap(_ => json.as[DeleteParameters].leftMap(JsonValidatorErrors(_))) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/IndexParameters.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/IndexParameters.scala new file mode 100644 index 0000000000..d11a4189c9 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/IndexParameters.scala @@ -0,0 +1,92 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import cats.syntax.either._ +import geotrellis.geowave.adapter.{DataType, TypeName} +import geotrellis.geowave.conf.StoreConfiguration +import geotrellis.geowave.dsl.json.{JsonValidator, _} +import io.circe.generic.extras.ConfiguredJsonCodec +import org.locationtech.geowave.core.store.api.{DataStore, Index} + +/** Kafka message to configure and create an index for specific data type + * + * @param indices List of index definitions that would be used by an adapter + * @param typeName Name of the type to be used in the table + * @param dataType Data type to expect for ingest + * @param namespace Store (i.e. 
Cassandra) namespace + */ +@ConfiguredJsonCodec +case class IndexParameters( + /** A list of [[IndexDefinition]]s + * Each index corresponds to a table in which each record will be stored + */ + indices: List[IndexDefinition], + + /** Adapter name with configured indexes + * This is going to be used as a name for [[geotrellis.geowave.adapter.GeoTrellisDataAdapter]] + */ + typeName: TypeName, + + /** Name of data type supported by the Adapter + * This is used to produce an adapter instance that will be registered with given index + */ + dataType: DataType, + + /** Storage (i.e. Cassandra) namespace to be used for connection + * Follow up ingest message should be using consistent namespace values + */ + namespace: Option[String] = None +) extends DataAdapterParameters { + def dataStore: DataStore = StoreConfiguration.getDataStore(namespace) +} + +object IndexParameters { + implicit val indexParametersValidator: JsonValidator[IndexParameters] = { json => + JsonValidator + .validateIndexParameters(json) + .toEither + .flatMap(_ => json.as[IndexParameters].leftMap(JsonValidatorErrors(_))) + } +} + +/** Each adapter can have a relation to multiple indicies. + * This class defines a single Index definition + * + * @param indexName Unique name for index instance + * The instance captures index type (ex: spatial, spatial_temporal) and the parameters, dimension resolutions and or bounds + * This name will be auto-generated based on index type and options but may be overwritten here + * + * The index name is used as table name for all ingested data. + * Using this ... 
+ * @param indexType Name of the index type available through GeoWave IndexPlugin SPI + * @param indexOptions Index Options specific to each [[indexType]] within GeoWave IndexPlugin SPI + * These are options may be configured through: + * 'org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi' + */ +@ConfiguredJsonCodec +case class IndexDefinition( + indexName: Option[String], + indexType: IndexType, + indexOptions: Map[String, String] +) { + def getIndex: Index = indexType.getIndex(indexOptions, indexName) +} + +object IndexDefinition { + implicit def indexDefinitiontoList(indexDefinition: IndexDefinition): List[IndexDefinition] = indexDefinition :: Nil +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/IngestParameters.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/IngestParameters.scala new file mode 100644 index 0000000000..c1520c4ea0 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/IngestParameters.scala @@ -0,0 +1,50 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +import java.net.URI + +import cats.syntax.either._ +import geotrellis.geowave.adapter.{DataType, TypeName} +import geotrellis.geowave.conf.StoreConfiguration +import geotrellis.geowave.dsl.json.{JsonValidator, _} +import io.circe.generic.extras.semiauto._ +import io.circe.{Decoder, Encoder} +import org.locationtech.geowave.core.store.api.{DataStore, Writer} + +case class IngestParameters[A]( + typeName: TypeName, + dataType: DataType, + uri: URI, + options: Option[A] = None, + namespace: Option[String] = None +) extends DataAdapterParameters { + def dataStore: DataStore = StoreConfiguration.getDataStore(namespace) + def writer[T]: Writer[T] = dataStore.createWriter[T](typeName.value) +} + +object IngestParameters { + implicit def ingestParametersDecoder[A: Decoder] = deriveConfiguredDecoder[IngestParameters[A]] + implicit def ingestParametersEncoder[A: Encoder] = deriveConfiguredEncoder[IngestParameters[A]] + + implicit def ingestParametersValidator[A: Decoder]: JsonValidator[IngestParameters[A]] = { json => + JsonValidator + .validateIngestParameters(json) + .toEither + .flatMap(_ => json.as[IngestParameters[A]].leftMap(JsonValidatorErrors(_))) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/Metadata.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/Metadata.scala new file mode 100644 index 0000000000..df0a195315 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/Metadata.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import geotrellis.layer.MapKeyTransform +import geotrellis.raster.RasterExtent +import geotrellis.vector.Extent + +trait Metadata { + type VoxelBoundsInternal <: VoxelBounds + def bounds: VoxelBoundsInternal + def extent: Extent + + def mapKeyTransform: MapKeyTransform = MapKeyTransform(extent, bounds.colMax, bounds.rowMax) + def rasterExtent: RasterExtent = RasterExtent(extent, bounds.colMax + 1, bounds.rowMax + 1) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/QueryConfiguration.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/QueryConfiguration.scala new file mode 100644 index 0000000000..4630a2fc24 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/QueryConfiguration.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +import geotrellis.geowave.adapter.TypeName +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.geowave.conf.StoreConfiguration +import org.locationtech.geowave.core.store.api.{DataStore, Query, Writer} +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass +import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter + +trait QueryConfiguration { + def compareOp: BasicQueryFilter.BasicQueryCompareOperation + def constraints: Option[ConstraintsByClass] + def query: Query[_] = { + val q = SQueryBuilder + .newBuilder + .addTypeName(typeName.value) + .indexName(indexName) + + constraints.fold(q.build)(c => q.constraints(new BasicQueryByClass(c, compareOp)).build) + } + + def indexName: String + def typeName: TypeName + def namespace: Option[String] + def dataStore: DataStore = StoreConfiguration.getDataStore(namespace) + def writer[T]: Writer[T] = dataStore.createWriter[T](typeName.value) +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/TilingBounds.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/TilingBounds.scala new file mode 100644 index 0000000000..b1c1a864fb --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/TilingBounds.scala @@ -0,0 +1,28 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import geotrellis.geowave.dsl.json._ +import io.circe.generic.extras.ConfiguredJsonCodec + +@ConfiguredJsonCodec +case class TilingBounds( + width: Option[Int] = None, + height: Option[Int] = None, + depth: Option[Int] = None, + spissitude: Option[Int] = None +) \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds.scala new file mode 100644 index 0000000000..7befb2c755 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds.scala @@ -0,0 +1,28 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import geotrellis.raster.GridBounds + +/** VoxelBounds that we'll use for the internal API that allows to abstract over bounds dimensions; Doxel Bounds? 
*/ +trait VoxelBounds { self => + def colMin: Int + def colMax: Int + def rowMin: Int + def rowMax: Int + def toGridBounds: GridBounds[Int] = GridBounds[Int](colMin, rowMin, colMax, rowMax) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds2D.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds2D.scala new file mode 100644 index 0000000000..022a7823f7 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds2D.scala @@ -0,0 +1,59 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +import geotrellis.raster.split.Split.Options +import spire.syntax.cfor.cfor + +case class VoxelBounds2D(colMin: Int, colMax: Int, rowMin: Int, rowMax: Int) extends VoxelBounds { + def toVoxelDimensions: VoxelDimensions2D = VoxelDimensions2D(colMax - colMin, rowMax - rowMin) + + def split(tb: TilingBounds): Seq[VoxelBounds2D] = split(toVoxelDimensions.withTilingBounds(tb)) + + def split(tb: TilingBounds, options: Options): Seq[VoxelBounds2D] = split(toVoxelDimensions.withTilingBounds(tb), options) + + def split(dims: VoxelDimensions2D): Seq[VoxelBounds2D] = split(dims, Options.DEFAULT) + + def split(dims: VoxelDimensions2D, options: Options): Seq[VoxelBounds2D] = { + val (tileCols, tileRows) = dims.width -> dims.height + val (layoutCols, layoutRows) = (colMax / dims.width, rowMax / dims.height) + + val splits = Array.ofDim[VoxelBounds2D](layoutCols * layoutRows) + + cfor(0)(_ < layoutRows, _ + 1) { layoutRow => + cfor(0)(_ < layoutCols, _ + 1) { layoutCol => + val firstCol = layoutCol * tileCols + val lastCol = { + val x = firstCol + tileCols - 1 + if (!options.extend && x > tileCols - 1) tileCols - 1 + else x + } + val firstRow = layoutRow * tileRows + val lastRow = { + val x = firstRow + tileRows - 1 + if (!options.extend && x > tileRows - 1) tileRows - 1 + else x + } + + val vb = VoxelBounds2D(firstCol, lastCol, firstRow, lastRow) + splits(layoutRow * layoutCols + layoutCol) = vb + } + } + + splits.toSeq + } +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds3D.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds3D.scala new file mode 100644 index 0000000000..1f953af2a7 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds3D.scala @@ -0,0 +1,67 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import geotrellis.raster.split.Split.Options +import spire.syntax.cfor.cfor + +case class VoxelBounds3D( + colMin: Int, colMax: Int, + rowMin: Int, rowMax: Int, + depthMin: Int, depthMax: Int +) extends VoxelBounds { + def toVoxelDimensions: VoxelDimensions3D = VoxelDimensions3D(colMax - colMin, rowMax - rowMin, depthMax - depthMin) + def split(tb: TilingBounds): Seq[VoxelBounds3D] = split(toVoxelDimensions.withTilingBounds(tb)) + def split(tb: TilingBounds, options: Options): Seq[VoxelBounds3D] = split(toVoxelDimensions.withTilingBounds(tb), options) + def split(dims: VoxelDimensions3D): Seq[VoxelBounds3D] = split(dims, Options.DEFAULT) + def split(dims: VoxelDimensions3D, options: Options = Options.DEFAULT): Seq[VoxelBounds3D] = { + val (tileCols, tileRows, tileDepths) = (dims.width, dims.height, dims.depth) + val (layoutCols, layoutRows, layoutDepths) = (colMax / dims.width, rowMax / dims.height, depthMax / dims.depth) + + val splits = Array.ofDim[VoxelBounds3D](layoutCols * layoutRows * layoutDepths) + + cfor(0)(_ < layoutDepths, _ + 1) { layoutDepth => + cfor(0)(_ < layoutRows, _ + 1) { layoutRow => + cfor(0)(_ < layoutCols, _ + 1) { layoutCol => + val firstCol = layoutCol * tileCols + val lastCol = { + val x = firstCol + tileCols - 1 + if (!options.extend && x > tileCols - 1) tileCols - 1 + else x + } + val firstRow = layoutRow * tileRows + val lastRow = { + val x = firstRow + tileRows - 1 + if (!options.extend && x > tileRows - 1) tileRows - 1 + else x + } + val firstDepth = layoutDepth * tileDepths 
+ val lastDepth = { + val x = firstDepth + tileDepths - 1 + if (!options.extend && x > tileDepths - 1) tileDepths - 1 + else x + } + + val vb = VoxelBounds3D(firstCol, lastCol, firstRow, lastRow, firstDepth, lastDepth) + splits(layoutDepth * layoutCols * layoutRows + layoutRow * layoutCols + layoutCol) = vb + } + } + } + + splits.toSeq + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds4D.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds4D.scala new file mode 100644 index 0000000000..a63531935c --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelBounds4D.scala @@ -0,0 +1,76 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +import geotrellis.raster.split.Split.Options +import spire.syntax.cfor.cfor + +case class VoxelBounds4D( + colMin: Int, colMax: Int, + rowMin: Int, rowMax: Int, + depthMin: Int, depthMax: Int, + spissitudeMin: Int, spissitudeMax: Int +) extends VoxelBounds { + def toVoxelDimensions: VoxelDimensions4D = VoxelDimensions4D(colMax - colMin, rowMax - rowMin, depthMax - depthMin, spissitudeMax - spissitudeMin) + def split(tb: TilingBounds): Seq[VoxelBounds4D] = split(toVoxelDimensions.withTilingBounds(tb)) + def split(tb: TilingBounds, options: Options): Seq[VoxelBounds4D] = split(toVoxelDimensions.withTilingBounds(tb), options) + def split(dims: VoxelDimensions4D): Seq[VoxelBounds4D] = split(dims, Options.DEFAULT) + def split(dims: VoxelDimensions4D, options: Options = Options.DEFAULT): Seq[VoxelBounds4D] = { + val (tileCols, tileRows, tileDepths, tileSpissitudes) = (dims.width, dims.height, dims.depth, dims.spissitude) + val (layoutCols, layoutRows, layoutDepths, layoutSpissitudes) = (colMax / dims.width, rowMax / dims.height, depthMax / dims.depth, spissitudeMax / dims.spissitude) + + val splits = Array.ofDim[VoxelBounds4D](layoutCols * layoutRows * layoutDepths * layoutSpissitudes) + + cfor(0)(_ < layoutSpissitudes, _ + 1) { layoutSpissitude => + cfor(0)(_ < layoutDepths, _ + 1) { layoutDepth => + cfor(0)(_ < layoutRows, _ + 1) { layoutRow => + cfor(0)(_ < layoutCols, _ + 1) { layoutCol => + val firstCol = layoutCol * tileCols + val lastCol = { + val x = firstCol + tileCols - 1 + if (!options.extend && x > tileCols - 1) tileCols - 1 + else x + } + val firstRow = layoutRow * tileRows + val lastRow = { + val x = firstRow + tileRows - 1 + if (!options.extend && x > tileRows - 1) tileRows - 1 + else x + } + val firstDepth = layoutDepth * tileDepths + val lastDepth = { + val x = firstDepth + tileDepths - 1 + if (!options.extend && x > tileDepths - 1) tileDepths - 1 + else x + } + val firstSpissitude = layoutSpissitude * 
tileSpissitudes + val lastSpissitude = { + val x = firstSpissitude + tileSpissitudes - 1 + if (!options.extend && x > tileSpissitudes - 1) tileSpissitudes - 1 + else x + } + + val vb = VoxelBounds4D(firstCol, lastCol, firstRow, lastRow, firstDepth, lastDepth, firstSpissitude, lastSpissitude) + splits(layoutSpissitude * layoutDepths * layoutCols * layoutRows + layoutDepth * layoutCols * layoutRows + layoutRow * layoutCols + layoutCol) = vb + } + } + } + } + + splits.toSeq + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelDimensions.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelDimensions.scala new file mode 100644 index 0000000000..f0e6b1b4ec --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/VoxelDimensions.scala @@ -0,0 +1,49 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +import geotrellis.raster.Dimensions +import geotrellis.geowave.dsl.json._ +import io.circe.generic.extras.ConfiguredJsonCodec + +@ConfiguredJsonCodec +sealed trait VoxelDimensions { self => + def width: Int + def height: Int + def toVoxelBounds: VoxelBounds + def toDimensions: Dimensions[Int] = Dimensions[Int](width, height) + def withTilingBounds(tb: TilingBounds): VoxelDimensions +} + +@ConfiguredJsonCodec +case class VoxelDimensions2D(width: Int, height: Int) extends VoxelDimensions { + def toVoxelBounds: VoxelBounds2D = VoxelBounds2D(0, width, 0, height) + def withTilingBounds(tb: TilingBounds): VoxelDimensions2D = + VoxelDimensions2D(tb.width.getOrElse(width), tb.height.getOrElse(height)) +} +@ConfiguredJsonCodec +case class VoxelDimensions3D(width: Int, height: Int, depth: Int) extends VoxelDimensions { + def toVoxelBounds: VoxelBounds3D = VoxelBounds3D(0, width, 0, height, 0, depth) + def withTilingBounds(tb: TilingBounds): VoxelDimensions3D = + VoxelDimensions3D(tb.width.getOrElse(width), tb.height.getOrElse(height), tb.depth.getOrElse(depth)) +} +@ConfiguredJsonCodec +case class VoxelDimensions4D(width: Int, height: Int, depth: Int, spissitude: Int) extends VoxelDimensions { + def toVoxelBounds: VoxelBounds4D = VoxelBounds4D(0, width, 0, height, 0, depth, 0, spissitude) + def withTilingBounds(tb: TilingBounds): VoxelDimensions4D = + VoxelDimensions4D(tb.width.getOrElse(width), tb.height.getOrElse(height), tb.depth.getOrElse(depth), tb.spissitude.getOrElse(spissitude)) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/avro/GeometryCodecs.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/GeometryCodecs.scala new file mode 100644 index 0000000000..2c5c47ba23 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/GeometryCodecs.scala @@ -0,0 +1,101 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl.avro + +import geotrellis.store.avro._ +import geotrellis.vector.{Point, Polygon} +import org.apache.avro.generic.GenericRecord +import org.apache.avro.{Schema, SchemaBuilder} +import org.locationtech.jts.geom.Coordinate + +import scala.collection.JavaConverters._ + +trait GeometryCodecs { + implicit def coordinateCodec: AvroRecordCodec[Coordinate] = new AvroRecordCodec[Coordinate]() { + override def schema: Schema = SchemaBuilder + .record("Coordinate") + .namespace("org.locationtech.jts.geom") + .fields() + .name("x").`type`.doubleType().noDefault + .name("y").`type`.doubleType().noDefault + .name("z").`type`.doubleType().noDefault + .endRecord() + + override def encode(c: Coordinate, rec: GenericRecord): Unit = { + rec.put("x", c.getX) + rec.put("y", c.getY) + rec.put("z", c.getZ) + } + + override def decode(rec: GenericRecord): Coordinate = { + new Coordinate( + rec[Double]("x"), + rec[Double]("y"), + rec[Double]("z") + ) + } + } + + implicit def pointCodec: AvroRecordCodec[Point] = new AvroRecordCodec[Point] { + def schema = SchemaBuilder + .record("Point") + .namespace("geotrellis.vector") + .fields() + .name("coordinate").`type`(AvroRecordCodec[Coordinate].schema).noDefault + .name("srid").`type`.intType.noDefault + .endRecord() + + def encode(thing: Point, rec: GenericRecord): Unit = { + rec.put("coordinate", AvroRecordCodec[Coordinate].encode(thing.getCoordinate)) + rec.put("srid", thing.getSRID) + } + + def decode(rec: GenericRecord): Point = { + val coordinate 
= AvroRecordCodec[Coordinate].decode(rec[GenericRecord]("coordinate")) + val point = Point(coordinate) + point.setSRID(rec[Int]("srid")) + point + } + } + + implicit def polygonCodec: AvroRecordCodec[Polygon] = new AvroRecordCodec[Polygon] { + override def schema: Schema = SchemaBuilder + .record("Polygon") + .namespace("geotrellis.vector") + .fields + .name("coordinates").`type`.array.items(AvroRecordCodec[Coordinate].schema).noDefault + .name("srid").`type`.intType.noDefault + .endRecord + + override def encode(thing: Polygon, rec: GenericRecord): Unit = { + val coordinates = java.util.Arrays.asList(thing.getCoordinates.map(coordinateCodec.encode): _*) + rec.put("coordinates", coordinates) + rec.put("srid", thing.getSRID) + } + + override def decode(rec: GenericRecord): Polygon = { + val coordinates = rec.get("coordinates") + .asInstanceOf[java.util.Collection[GenericRecord]] + .asScala + .toArray[GenericRecord] + .map(coordinateCodec.decode) + val polygon = Polygon(coordinates) + polygon.setSRID(rec[Int]("srid")) + polygon + } + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/avro/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/Implicits.scala new file mode 100644 index 0000000000..8f9d60f842 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/Implicits.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl.avro + +trait Implicits extends GeometryCodecs with VoxelBoundsCodec with VoxelDimensionsCodec + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelBoundsCodec.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelBoundsCodec.scala new file mode 100644 index 0000000000..b444ddd7b7 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelBoundsCodec.scala @@ -0,0 +1,120 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package geotrellis.geowave.dsl.avro
+
+import geotrellis.geowave.dsl._
+import geotrellis.store.avro._
+import org.apache.avro.SchemaBuilder
+import org.apache.avro.generic.GenericRecord
+
+trait VoxelBoundsCodec {
+  implicit def voxelBounds2DCodec: AvroRecordCodec[VoxelBounds2D] = new AvroRecordCodec[VoxelBounds2D] {
+    def schema = SchemaBuilder
+      .record("VoxelBounds2D").namespace("geotrellis.geowave.dsl")
+      .fields()
+      .name("colMin").`type`.intType().noDefault
+      .name("colMax").`type`.intType().noDefault
+      .name("rowMin").`type`.intType().noDefault
+      .name("rowMax").`type`.intType().noDefault
+      .endRecord()
+
+    def encode(bounds: VoxelBounds2D, rec: GenericRecord): Unit = {
+      rec.put("colMin", bounds.colMin)
+      rec.put("colMax", bounds.colMax)
+      rec.put("rowMin", bounds.rowMin)
+      rec.put("rowMax", bounds.rowMax)
+    }
+
+    def decode(rec: GenericRecord): VoxelBounds2D =
+      VoxelBounds2D(
+        rec[Int]("colMin"),
+        rec[Int]("colMax"),
+        rec[Int]("rowMin"),
+        rec[Int]("rowMax")
+      )
+  }
+
+  implicit def voxelBounds3DCodec: AvroRecordCodec[VoxelBounds3D] = new AvroRecordCodec[VoxelBounds3D] {
+    def schema = SchemaBuilder
+      .record("VoxelBounds3D").namespace("geotrellis.geowave.dsl")
+      .fields()
+      .name("colMin").`type`.intType().noDefault
+      .name("colMax").`type`.intType().noDefault
+      .name("rowMin").`type`.intType().noDefault
+      .name("rowMax").`type`.intType().noDefault
+      .name("depthMin").`type`.intType().noDefault
+      .name("depthMax").`type`.intType().noDefault
+      .endRecord()
+
+    def encode(bounds: VoxelBounds3D, rec: GenericRecord): Unit = {
+      rec.put("colMin", bounds.colMin)
+      rec.put("colMax", bounds.colMax)
+      rec.put("rowMin", bounds.rowMin)
+      rec.put("rowMax", bounds.rowMax)
+      rec.put("depthMin", bounds.depthMin)
+      rec.put("depthMax", bounds.depthMax)
+    }
+
+    def decode(rec: GenericRecord): VoxelBounds3D =
+      VoxelBounds3D(
+        rec[Int]("colMin"),
+        rec[Int]("colMax"),
+        rec[Int]("rowMin"),
+        rec[Int]("rowMax"),
+        rec[Int]("depthMin"),
+        rec[Int]("depthMax")
+      )
+  }
+
+  implicit def voxelBounds4DCodec: AvroRecordCodec[VoxelBounds4D] = new AvroRecordCodec[VoxelBounds4D] {
+    def schema = SchemaBuilder
+      .record("VoxelBounds4D").namespace("geotrellis.geowave.dsl")
+      .fields()
+      .name("colMin").`type`.intType().noDefault
+      .name("colMax").`type`.intType().noDefault
+      .name("rowMin").`type`.intType().noDefault
+      .name("rowMax").`type`.intType().noDefault
+      .name("depthMin").`type`.intType().noDefault
+      .name("depthMax").`type`.intType().noDefault
+      .name("spissitudeMin").`type`.intType().noDefault
+      .name("spissitudeMax").`type`.intType().noDefault
+      .endRecord()
+
+    def encode(bounds: VoxelBounds4D, rec: GenericRecord): Unit = {
+      rec.put("colMin", bounds.colMin)
+      rec.put("colMax", bounds.colMax)
+      rec.put("rowMin", bounds.rowMin)
+      rec.put("rowMax", bounds.rowMax)
+      rec.put("depthMin", bounds.depthMin)
+      rec.put("depthMax", bounds.depthMax)
+      rec.put("spissitudeMin", bounds.spissitudeMin)
+      rec.put("spissitudeMax", bounds.spissitudeMax)
+    }
+
+    def decode(rec: GenericRecord): VoxelBounds4D =
+      VoxelBounds4D(
+        rec[Int]("colMin"),
+        rec[Int]("colMax"),
+        rec[Int]("rowMin"),
+        rec[Int]("rowMax"),
+        rec[Int]("depthMin"),
+        rec[Int]("depthMax"),
+        rec[Int]("spissitudeMin"),
+        rec[Int]("spissitudeMax")
+      )
+  }
+}
diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelDimensionsCodec.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelDimensionsCodec.scala
new file mode 100644
index 0000000000..5570b90839
--- /dev/null
+++ b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/VoxelDimensionsCodec.scala
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2020 Azavea
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl.avro + +import geotrellis.geowave.dsl._ +import geotrellis.store.avro._ +import org.apache.avro.SchemaBuilder +import org.apache.avro.generic.GenericRecord + +trait VoxelDimensionsCodec { + implicit def voxelDimensions2DCodec: AvroRecordCodec[VoxelDimensions2D] = new AvroRecordCodec[VoxelDimensions2D] { + def schema = SchemaBuilder + .record("VoxelDimensions2D").namespace("geotrellis.geowave.dsl") + .fields() + .name("width").`type`.intType().noDefault + .name("height").`type`.intType().noDefault + .endRecord() + + def encode(bounds: VoxelDimensions2D, rec: GenericRecord): Unit = { + rec.put("width", bounds.width) + rec.put("height", bounds.height) + } + + def decode(rec: GenericRecord): VoxelDimensions2D = + VoxelDimensions2D( + rec[Int]("width"), + rec[Int]("height") + ) + } + + implicit def voxelDimensions3DCodec: AvroRecordCodec[VoxelDimensions3D] = new AvroRecordCodec[VoxelDimensions3D] { + def schema = SchemaBuilder + .record("VoxelDimensions3D").namespace("geotrellis.geowave.dsl") + .fields() + .name("width").`type`.intType().noDefault + .name("height").`type`.intType().noDefault + .name("depth").`type`.intType().noDefault + .endRecord() + + def encode(bounds: VoxelDimensions3D, rec: GenericRecord): Unit = { + rec.put("width", bounds.width) + rec.put("height", bounds.height) + rec.put("depth", bounds.depth) + } + + def decode(rec: GenericRecord): VoxelDimensions3D = + VoxelDimensions3D( + rec[Int]("width"), + rec[Int]("height"), + rec[Int]("depth") + ) + } + + implicit def voxelDimensions4DCodec: 
AvroRecordCodec[VoxelDimensions4D] = new AvroRecordCodec[VoxelDimensions4D] {
+    def schema = SchemaBuilder
+      .record("VoxelDimensions4D").namespace("geotrellis.geowave.dsl")
+      .fields()
+      .name("width").`type`.intType().noDefault
+      .name("height").`type`.intType().noDefault
+      .name("depth").`type`.intType().noDefault
+      .name("spissitude").`type`.intType().noDefault
+      .endRecord()
+
+    def encode(bounds: VoxelDimensions4D, rec: GenericRecord): Unit = {
+      rec.put("width", bounds.width)
+      rec.put("height", bounds.height)
+      rec.put("depth", bounds.depth)
+      rec.put("spissitude", bounds.spissitude)
+    }
+
+    def decode(rec: GenericRecord): VoxelDimensions4D =
+      VoxelDimensions4D(
+        rec[Int]("width"),
+        rec[Int]("height"),
+        rec[Int]("depth"),
+        rec[Int]("spissitude")
+      )
+  }
+}
diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/avro/package.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/package.scala
new file mode 100644
index 0000000000..c40bd2077b
--- /dev/null
+++ b/geowave/src/main/scala/geotrellis/geowave/dsl/avro/package.scala
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2020 Azavea
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package geotrellis.geowave.dsl + +package object avro extends avro.Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/json/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/json/Implicits.scala new file mode 100644 index 0000000000..570fa48191 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/json/Implicits.scala @@ -0,0 +1,88 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl.json + +import java.net.URI +import java.time.Instant + +import cats.syntax.either._ +import geotrellis.geowave.adapter.{DataType, DataTypeRegistry} +import io.circe._ +import io.circe.generic.extras.Configuration +import io.circe.syntax._ +import io.estatico.newtype.Coercible +import org.locationtech.geowave.core.geotime.store.dimension.Time.TimeRange +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter + +trait Implicits { + implicit val customConfig: Configuration = Configuration.default.withDiscriminator("classType") + + /** Derive circe codecs for newtypes. 
*/ + implicit def coercibleEncoder[R, N](implicit ev: Coercible[Encoder[R], Encoder[N]], R: Encoder[R]): Encoder[N] = ev(R) + implicit def coercibleDecoder[R, N](implicit ev: Coercible[Decoder[R], Decoder[N]], R: Decoder[R]): Decoder[N] = ev(R) + + /** Overriding auto derived newtype codec to throw a more informative error. */ + implicit val dataTypeDecoder: Decoder[DataType] = Decoder.decodeString.emap { str => + DataTypeRegistry + .find(str) + .fold(s"Invalid DataType: $str; available DataTypes: ${DataTypeRegistry.supportedTypes.mkString(", ")}".asLeft[DataType])(_.asRight) + } + + /** Other Circe codecs */ + implicit val uriEncoder: Encoder[URI] = Encoder.encodeString.contramap[URI](_.toString) + implicit val uriDecoder: Decoder[URI] = Decoder.decodeString.emap { str => + Either.catchNonFatal(URI.create(str)).leftMap(_ => s"Could not decode URI: $str") + } + + implicit val encodeFoo: Encoder[NumericRange] = { numericRange => + Json.obj("min" -> numericRange.getMin.asJson, "max" -> numericRange.getMax.asJson) + } + + implicit val decodeFoo: Decoder[NumericRange] = { c => + for { + min <- c.downField("min").as[Double] + max <- c.downField("max").as[Double] + } yield new NumericRange(min, max) + } + + implicit val encodeTimeRange: Encoder[TimeRange] = { range => + val data = range.toNumericData + Json.obj( + "min" -> Instant.ofEpochMilli(data.getMin.toLong).asJson, + "max" -> Instant.ofEpochMilli(data.getMax.toLong).asJson) + } + + implicit val decodeTimeRange: Decoder[TimeRange] = { c => + for { + min <- c.downField("min").as[Instant] + max <- c.downField("max").as[Instant] + } yield new TimeRange(min.toEpochMilli, max.toEpochMilli) + } + + implicit val basicQueryCompareOperationEncoder: Encoder[BasicQueryFilter.BasicQueryCompareOperation] = + Encoder.encodeString.contramap[BasicQueryFilter.BasicQueryCompareOperation](_.toString) + + implicit val basicQueryCompareOperationDecoder: Decoder[BasicQueryFilter.BasicQueryCompareOperation] = + Decoder.decodeString.emap { 
str => + Either + .catchNonFatal(BasicQueryFilter.BasicQueryCompareOperation.valueOf(str)) + .leftMap(_ => s"Could not decode BasicQueryCompareOperation: $str") + } +} + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidator.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidator.scala new file mode 100644 index 0000000000..98e4be3195 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidator.scala @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl.json + +import cats.data.Validated +import cats.effect.Sync +import cats.syntax.either._ +import io.circe.Json +import io.circe.schema.Schema + +trait JsonValidator[T] { + def validate(json: Json): Either[JsonValidatorErrors, T] + def parseAndValidate(str: String): Either[JsonValidatorErrors, T] = + io.circe.parser.parse(str).leftMap(JsonValidatorErrors(_)).flatMap(validate) +} + +object JsonValidator { + private def readJson(resource: String): Json = { + val stream = getClass.getResourceAsStream(resource) + try { + val lines = scala.io.Source.fromInputStream(stream).getLines() + val json = lines.mkString(" ") + io.circe.parser.parse(json).valueOr(throw _) + } finally stream.close() + } + + private lazy val messageHeaderSchema = Schema.load(readJson("/json/message.schema.json")) + private lazy val indexParametersSchema = Schema.load(readJson("/json/index-message.schema.json")) + private lazy val ingestParametersSchema = Schema.load(readJson("/json/ingest-message.schema.json")) + private lazy val deleteParametersSchema = Schema.load(readJson("/json/delete-message.schema.json")) + + private def validateSchema(schema: Schema, json: Json): Validated[JsonValidatorErrors, Unit] = schema.validate(json).leftMap(JsonValidatorErrors(_)) + def validateMessageHeader(json: Json): Validated[JsonValidatorErrors, Unit] = validateSchema(messageHeaderSchema, json) + def validateIngestParameters(json: Json): Validated[JsonValidatorErrors, Unit] = validateSchema(ingestParametersSchema, json) + def validateIndexParameters(json: Json): Validated[JsonValidatorErrors, Unit] = validateSchema(indexParametersSchema, json) + def validateDeleteParameters(json: Json): Validated[JsonValidatorErrors, Unit] = validateSchema(deleteParametersSchema, json) + + def apply[T](str: String)(implicit validator: JsonValidator[T]) = validator + def parse[T: JsonValidator](str: String) = apply(str).parseAndValidate(str) + def parseF[F[_]: Sync, T: JsonValidator](str: 
String) = Sync[F].fromEither(apply(str).parseAndValidate(str)) + def parseUnsafe[T: JsonValidator](str: String) = parse[T](str).valueOr(throw _) + def validate[T](json: Json)(implicit validator: JsonValidator[T]) = validator.validate(json) + def validateUnsafe[T: JsonValidator](json: Json) = validate[T](json).valueOr(throw _) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidatorErrors.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidatorErrors.scala new file mode 100644 index 0000000000..0b1ba91e99 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/json/JsonValidatorErrors.scala @@ -0,0 +1,33 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl.json + +import cats.data.NonEmptyList +import io.circe.schema.ValidationError +import io.circe.{DecodingFailure, Error} + +final case class JsonValidatorErrors(errors: NonEmptyList[Error]) extends Exception { + def toList: List[Error] = errors.head :: errors.tail + + override def getMessage: String = errors.toList.map(_.getMessage).mkString("; ") + override def fillInStackTrace(): Throwable = this +} + +object JsonValidatorErrors { + def apply(err: Error): JsonValidatorErrors = JsonValidatorErrors(NonEmptyList.of(err)) + def apply(nel: NonEmptyList[ValidationError])(implicit di: DummyImplicit): JsonValidatorErrors = JsonValidatorErrors(nel.map(e => DecodingFailure(e.getMessage, Nil))) +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/json/package.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/json/package.scala new file mode 100644 index 0000000000..68426ffa9e --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/json/package.scala @@ -0,0 +1,19 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +package object json extends json.Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/package.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/package.scala new file mode 100644 index 0000000000..a45a72f401 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/package.scala @@ -0,0 +1,61 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave + +import geotrellis.geowave.utils._ +import io.estatico.newtype.macros.newsubtype +import org.locationtech.geowave.core.cli.prefix.PrefixedJCommander +import org.locationtech.geowave.core.store.api.{DataStore, Index} +import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions +import org.locationtech.geowave.core.store.index.IndexPluginOptions + +package object dsl { + + /** Name of index type registered with GeoWave IndexPlugin SPI */ + @newsubtype case class IndexType(value: String) { + def getIndex(options: Map[String, String], indexName: Option[String] = None): Index = { + val outputIndexOptions: IndexPluginOptions = new IndexPluginOptions + outputIndexOptions.selectPlugin(value) + + if(options.nonEmpty) { + val opts = outputIndexOptions.getDimensionalityOptions + val commander = new PrefixedJCommander() + commander.addPrefixedObject(opts) + commander.parse(options.prefixedList: _*) + } + + indexName.foreach(outputIndexOptions.setName) + 
outputIndexOptions.createIndex() + } + } + + @newsubtype case class DataStoreType(value: String) { + def getDataStore(options: Map[String, String]): DataStore = { + val dataStorePluginOptions = new DataStorePluginOptions() + dataStorePluginOptions.selectPlugin(value) + + if(options.nonEmpty) { + val opts = dataStorePluginOptions.getFactoryOptions + val commander = new PrefixedJCommander() + commander.addPrefixedObject(opts) + commander.parse(options.prefixedList: _*) + } + + dataStorePluginOptions.createDataStore() + } + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/Implicits.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/Implicits.scala new file mode 100644 index 0000000000..e7c512c2b2 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/Implicits.scala @@ -0,0 +1,32 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl.syntax + +import geotrellis.geowave.adapter._ +import geotrellis.geowave.dsl._ + +trait Implicits { + implicit class NewtypeOps(val self: String) { + def indexType: IndexType = IndexType(self) + def typeName: TypeName = TypeName(self) + def indexFieldName: IndexFieldName = IndexFieldName(self) + def dataType: DataType = DataType(self) + def dataStoreType: DataStoreType = DataStoreType(self) + } +} + +object Implicits extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/package.scala b/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/package.scala new file mode 100644 index 0000000000..be610a7b88 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/dsl/syntax/package.scala @@ -0,0 +1,19 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.dsl + +package object syntax extends Implicits diff --git a/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexBuilder.scala b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexBuilder.scala new file mode 100644 index 0000000000..af3ab4c6df --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexBuilder.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.index + + +import org.locationtech.geowave.core.store.api.Index +import org.locationtech.geowave.core.store.index.BaseIndexBuilder + +class SpatialTemporalElevationIndexBuilder( + val options: SpatialTemporalElevationOptions = new SpatialTemporalElevationOptions() +) extends BaseIndexBuilder[SpatialTemporalElevationIndexBuilder] { + def createIndex: Index = new SpatialTemporalElevationIndexTypeProvider().createIndex(options) +} \ No newline at end of file diff --git a/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexTypeProvider.scala b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexTypeProvider.scala new file mode 100644 index 0000000000..a39281bd38 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationIndexTypeProvider.scala @@ -0,0 +1,129 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.index + +import com.beust.jcommander.IStringConverter +import geotrellis.geowave.index.dimension.ElevationDefinition +import geotrellis.geowave.index.field.ElevationField +import org.geotools.referencing.CRS +import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit +import org.locationtech.geowave.core.geotime.index.dimension.{LatitudeDefinition, LongitudeDefinition, TimeDefinition} +import org.locationtech.geowave.core.geotime.store.dimension._ +import org.locationtech.geowave.core.geotime.util.GeometryUtils +import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition +import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType +import org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory +import org.locationtech.geowave.core.store.api.Index +import org.locationtech.geowave.core.store.dimension.NumericDimensionField +import org.locationtech.geowave.core.store.index.{BasicIndexModel, CustomNameIndex} +import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi +import org.opengis.referencing.crs.CoordinateReferenceSystem +import spire.syntax.cfor._ + +class SpatialTemporalElevationIndexTypeProvider extends DimensionalityTypeProviderSpi[SpatialTemporalElevationOptions] { + import SpatialTemporalElevationIndexTypeProvider._ + + private val DEFAULT_SP_TEMP_ELV_ID_STR = "SP_TEMP_ELV_IDX" + + def getDimensions(options: SpatialTemporalElevationOptions): Array[NumericDimensionDefinition] = + Array( + new LongitudeDefinition(), + new LatitudeDefinition(true), + new TimeDefinition(Unit.YEAR), + new ElevationDefinition(minValue=0, maxValue=options.getMaxElevation) + ) + + def getSpatialTemporalFields(geometryPrecision: Int): Array[NumericDimensionField[_]] = + Array( + new LongitudeField(geometryPrecision), + new LatitudeField(geometryPrecision, true), + new TimeField(Unit.YEAR), + new ElevationField() + ) + + def getDimensionalityTypeName: String = 
"spatial_temporal_elevation" + + def getDimensionalityTypeDescription: String = "This dimensionality type matches all indices that only require Geometry, Time and Elevation." + + /** + * This is a modified copy of org.locationtech.geowave.core.geotime.ingest.SpatialTemporalDimensionalityTypeProvider::internalCreateIndex: + * https://github.com/locationtech/geowave/blob/v1.0.0/core/geotime/src/main/java/org/locationtech/geowave/core/geotime/ingest/SpatialTemporalDimensionalityTypeProvider.java#L94 + */ + def createIndex(options: SpatialTemporalElevationOptions): Index = { + val geometryPrecision: Int = options.getGeometryPrecision() + + val (dimensions, fields, isDefaultCRS, crsCode) = + if ((options.getCrs == null) || options.getCrs.isEmpty || options.getCrs.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR)) { + (getDimensions(options), getSpatialTemporalFields(geometryPrecision), true, "EPSG:4326") + } else { + val crs = decodeCRS(options.getCrs) + val cs = crs.getCoordinateSystem + val isDefaultCRS = false + val crsCode = options.getCrs + val dimensions = Array.ofDim[NumericDimensionDefinition](cs.getDimension + 2) + val fields = Array.ofDim[NumericDimensionField[_]](dimensions.length) + + cfor(0)(_ < dimensions.length - 2, _ + 1) { d => + val csa = cs.getAxis(d) + dimensions(d) = new CustomCRSBoundedSpatialDimension(d.toByte, csa.getMinimumValue, csa.getMaximumValue) + fields(d) = new CustomCRSSpatialField(dimensions(d).asInstanceOf[CustomCRSBoundedSpatialDimension], geometryPrecision) + } + + val elevationDefinition = new ElevationDefinition(minValue = 0, maxValue = options.getMaxElevation) + dimensions(dimensions.length - 2) = new TimeDefinition(options.getTemporalPerioidicty) + fields(dimensions.length - 2) = new TimeField(options.getTemporalPerioidicty) + dimensions(dimensions.length - 1) = elevationDefinition + fields(dimensions.length - 1) = new ElevationField(baseDefinition = elevationDefinition) + + (dimensions, fields, isDefaultCRS, crsCode) + } + + val 
indexModel: BasicIndexModel = + if (isDefaultCRS) new BasicIndexModel(fields) else new CustomCrsIndexModel(fields, crsCode) + + // index combinedId + val combinedId: String = + if (isDefaultCRS) s"${DEFAULT_SP_TEMP_ELV_ID_STR}_${options.getBias}_${options.getTemporalPerioidicty}_${options.getMaxElevation}" + else s"${DEFAULT_SP_TEMP_ELV_ID_STR}_${crsCode.substring(crsCode.indexOf(":") + 1)}_${options.getBias}_${options.getTemporalPerioidicty}_${options.getMaxElevation}" + + new CustomNameIndex( + XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy( + dimensions, + Array[Int]( + options.getBias.getSpatialPrecision, + options.getBias.getSpatialPrecision, + options.getBias.getTemporalPrecision, + options.getBias.getSpatialPrecision + ), + SFCType.HILBERT, + options.getMaxDuplicates + ), + indexModel, + combinedId + ) + } + + def createOptions: SpatialTemporalElevationOptions = new SpatialTemporalElevationOptions() +} + +object SpatialTemporalElevationIndexTypeProvider { + class IntConverter extends IStringConverter[Int] { + def convert(value: String): Int = java.lang.Integer.parseUnsignedInt(value) + } + + def decodeCRS(crsCode: String): CoordinateReferenceSystem = CRS.decode(crsCode, true) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationOptions.scala b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationOptions.scala new file mode 100644 index 0000000000..5129fc0659 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/SpatialTemporalElevationOptions.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index + +import com.beust.jcommander.Parameter +import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias +import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy +import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit +import org.locationtech.geowave.core.geotime.index.{CommonSpatialOptions, SpatialTemporalDimensionalityTypeProvider} + +class SpatialTemporalElevationOptions extends CommonSpatialOptions { + protected var DEFAULT_TEMPORAL_PERIODICITY = Unit.YEAR + protected var DEFAULT_MAX_ELEVATION = 32000 + + @Parameter( + names = Array("--maxElevation"), + required = false, + description = "The maximum value of the elevation dimension. Elevation values are binned within the range [0, maxElevation].", + converter = classOf[SpatialTemporalElevationIndexTypeProvider.IntConverter] + ) + protected var maxElevation: Int = DEFAULT_MAX_ELEVATION + + @Parameter( + names = Array("--periodTemporal"), + required = false, + description = "The periodicity of the temporal dimension. Because time is continuous, it is binned at this interval.", + converter = classOf[SpatialTemporalDimensionalityTypeProvider.UnitConverter] + ) + protected var temporalPeriodicity: TemporalBinningStrategy.Unit = DEFAULT_TEMPORAL_PERIODICITY + + @Parameter( + names = Array("--bias"), + required = false, + description = "The bias of the spatial-temporal index. 
There can be more precision given to time or space if necessary.", + converter = classOf[SpatialTemporalDimensionalityTypeProvider.BiasConverter] + ) + protected var bias = Bias.BALANCED + + @Parameter( + names = Array("--maxDuplicates"), + required = false, + description = "The max number of duplicates per dimension range. The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because its 2 dimensions, and line/poly time range data would be 8)." + ) + protected var maxDuplicates: Long = -1 + + def getBias: SpatialTemporalDimensionalityTypeProvider.Bias = bias + + def setBias(bias: SpatialTemporalDimensionalityTypeProvider.Bias) = this.bias = bias + + def getMaxDuplicates: Long = maxDuplicates + + def setMaxDuplicates(maxDuplicates: Long) = this.maxDuplicates = maxDuplicates + + def getTemporalPerioidicty: TemporalBinningStrategy.Unit = temporalPeriodicity + + def setTemporalPerioidicty(temporalPeriodicity: TemporalBinningStrategy.Unit) = this.temporalPeriodicity = temporalPeriodicity + + def getMaxElevation: Int = maxElevation + + def setMaxElevation(x: Int) = this.maxElevation = x + + override def getCrs: String = crs +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/dimension/Elevation.scala b/geowave/src/main/scala/geotrellis/geowave/index/dimension/Elevation.scala new file mode 100644 index 0000000000..62136b4e75 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/dimension/Elevation.scala @@ -0,0 +1,89 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index.dimension + +import java.nio.ByteBuffer + +import geotrellis.geowave.utils.DoubleUtils +import org.locationtech.geowave.core.index.persist.Persistable +import org.locationtech.geowave.core.index.sfc.data.{NumericData, NumericRange} +import org.locationtech.geowave.core.index.{ByteArrayUtils, VarintUtils} +import org.locationtech.geowave.core.store.dimension.NumericDimensionField +import org.locationtech.geowave.core.store.index.CommonIndexValue + +/** Elevation common index value. + * @param minValue minimum elevation included in this index value, inclusive + * @param maxValue maximum elevation included in this index value, inclusive + */ +class Elevation( + private var minValue: Double = 0d, + private var maxValue: Double = 0d +) extends CommonIndexValue with Persistable { + + private var visibility: Array[Byte] = Array.empty + def getMinValue: Double = minValue + def getMaxValue: Double = maxValue + def getLength: Double = maxValue - minValue + + def toBinary: Array[Byte] = { + val bytes = + ByteBuffer.allocate( + DoubleUtils.doubleByteLength + + DoubleUtils.doubleByteLength + + VarintUtils.unsignedIntByteLength(visibility.length) + + visibility.length + ) + + DoubleUtils.writeDouble(minValue, bytes) + DoubleUtils.writeDouble(maxValue, bytes) + VarintUtils.writeUnsignedInt(visibility.length, bytes) + bytes.put(visibility) + + bytes.array() + } + + def fromBinary(bytes: Array[Byte]): Unit = { + val buf = ByteBuffer.wrap(bytes) + + minValue = DoubleUtils.readDouble(buf) + maxValue = DoubleUtils.readDouble(buf) + val length = VarintUtils.readUnsignedInt(buf) + visibility = ByteArrayUtils.safeRead(buf, length) + } + + def getVisibility: Array[Byte] = visibility + + def setVisibility(bytes: Array[Byte]): Unit = visibility = bytes + + def overlaps(field: Array[NumericDimensionField[_ <: CommonIndexValue]], rangeData: 
Array[NumericData]): Boolean = { + val nd = rangeData(0) + ((nd.getMin >= getMinValue) && (nd.getMax <= getMaxValue)) || + ((nd.getMin <= getMinValue) && (nd.getMax >= getMinValue)) || + ((nd.getMin <= getMaxValue) && (nd.getMax >= getMaxValue)) + } + + def toNumericData: NumericData = new NumericRange(minValue, maxValue) + + override def toString: String = s"Elevation($minValue, $maxValue)" +} + +object Elevation { + val DEFAULT_FIELD_NAME = "default_elevation_dimension" + def apply(minValue: Double, maxValue: Double): Elevation = new Elevation(minValue, maxValue) + def apply(value: Double): Elevation = new Elevation(value, value) + def apply(): Elevation = new Elevation() +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationDefinition.scala b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationDefinition.scala new file mode 100644 index 0000000000..defdb21bde --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationDefinition.scala @@ -0,0 +1,25 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index.dimension + +import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition + +/** + * Elevation dimension definition. 
+ * @note default maxValue assumes elevation will be stored in feet + */ +class ElevationDefinition(minValue: Double = 0 , maxValue: Double = 32000) extends BasicDimensionDefinition(minValue, maxValue) diff --git a/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationReader.scala b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationReader.scala new file mode 100644 index 0000000000..2f403deffd --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationReader.scala @@ -0,0 +1,23 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index.dimension + +import org.locationtech.geowave.core.store.data.field.FieldReader + +class ElevationReader extends FieldReader[Elevation] { + def readField(bytes: Array[Byte]): Elevation = { val depth = Elevation(); depth.fromBinary(bytes); depth } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationWriter.scala b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationWriter.scala new file mode 100644 index 0000000000..1c55fead83 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/dimension/ElevationWriter.scala @@ -0,0 +1,23 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index.dimension + +import org.locationtech.geowave.core.store.data.field.FieldWriter + +class ElevationWriter extends FieldWriter[AnyRef, Elevation] { + def writeField(elevation: Elevation): Array[Byte] = elevation.toBinary +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/field/ElevationField.scala b/geowave/src/main/scala/geotrellis/geowave/index/field/ElevationField.scala new file mode 100644 index 0000000000..a4ff12bcac --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/field/ElevationField.scala @@ -0,0 +1,116 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.index.field + +import java.nio.ByteBuffer + +import geotrellis.geowave.index.dimension.{Elevation, ElevationDefinition, ElevationReader, ElevationWriter} +import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition +import org.locationtech.geowave.core.index.dimension.bin.BinRange +import org.locationtech.geowave.core.index.persist.PersistenceUtils +import org.locationtech.geowave.core.index.sfc.data.{NumericData, NumericRange} +import org.locationtech.geowave.core.index.{ByteArrayUtils, StringUtils, VarintUtils} +import org.locationtech.geowave.core.store.data.field.{FieldReader, FieldWriter} +import org.locationtech.geowave.core.store.dimension.NumericDimensionField + +class ElevationField( + private var reader: FieldReader[Elevation] = new ElevationReader(), + private var writer: FieldWriter[AnyRef, Elevation] = new ElevationWriter(), + private var fieldName: String = ElevationField.DEFAULT_FIELD_ID, + private var baseDefinition: NumericDimensionDefinition = new ElevationDefinition(0, 32000) +) extends NumericDimensionField[Elevation] { + + def getNumericData(dataElement: Elevation): NumericData = dataElement.toNumericData + + def getFieldName: String = fieldName + + def getWriter: FieldWriter[_, Elevation] = writer + + def getReader: FieldReader[Elevation] = reader + + def getBaseDefinition: NumericDimensionDefinition = baseDefinition + + def getRange: Double = baseDefinition.getRange + + def normalize(value: Double): Double = baseDefinition.normalize(value) + + def denormalize(value: Double): Double = baseDefinition.denormalize(value) + + def getNormalizedRanges(range: NumericData): Array[BinRange] = baseDefinition.getNormalizedRanges(range) + + def getDenormalizedRange(range: BinRange): NumericRange = baseDefinition.getDenormalizedRange(range) + + def getFixedBinIdSize: Int = baseDefinition.getFixedBinIdSize + + def getBounds: NumericRange = baseDefinition.getBounds + + def getFullRange: NumericData = new 
NumericRange(0, Int.MaxValue) + + def toBinary: Array[Byte] = { + val dimensionBinary = PersistenceUtils.toBinary(baseDefinition) + val fieldNameBytes = StringUtils.stringToBinary(fieldName) + val buf = + ByteBuffer.allocate( + dimensionBinary.length + + fieldNameBytes.length + + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)) + VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf) + buf.put(fieldNameBytes) + buf.put(dimensionBinary) + buf.array() + } + + def fromBinary(bytes: Array[Byte]): Unit = { + val buf = ByteBuffer.wrap(bytes) + val fieldNameLength = VarintUtils.readUnsignedInt(buf) + val fieldNameBinary = ByteArrayUtils.safeRead(buf, fieldNameLength) + fieldName = StringUtils.stringFromBinary(fieldNameBinary) + + val dimensionBinary = Array.ofDim[Byte](buf.remaining) + buf.get(dimensionBinary) + + baseDefinition = PersistenceUtils.fromBinary(dimensionBinary).asInstanceOf[NumericDimensionDefinition] + } + + override def hashCode: Int = { + val prime = 31 + var result = 1 + result = (prime * result) + (if (baseDefinition == null) 0 else baseDefinition.hashCode) + result = (prime * result) + (if (fieldName == null) 0 else fieldName.hashCode) + result + } + + override def equals(obj: Any): Boolean = { + if (this eq obj.asInstanceOf[AnyRef]) return true + if (obj == null) return false + if (getClass ne obj.getClass) return false + val other = obj.asInstanceOf[ElevationField] + if (baseDefinition == null) if (other.baseDefinition != null) return false + else if (!(baseDefinition == other.baseDefinition)) return false + if (fieldName == null) if (other.fieldName != null) return false + else if (!(fieldName == other.fieldName)) return false + true + } +} + +object ElevationField { + val DEFAULT_FIELD_ID = "default_elevation_dimension" + + def apply(): ElevationField = new ElevationField() + def apply(maxValue: Double): ElevationField = new ElevationField(baseDefinition = new ElevationDefinition(0, maxValue.toInt)) + def apply(maxValue: Int): 
ElevationField = new ElevationField(baseDefinition = new ElevationDefinition(0, maxValue)) +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialElevationQuery.scala b/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialElevationQuery.scala new file mode 100644 index 0000000000..f60504eb9f --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialElevationQuery.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.index.query + +import geotrellis.geowave.index.dimension.ElevationDefinition +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.geotime.util.GeometryUtils +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.{ConstraintData, ConstraintSet, ConstraintsByClass} +import org.locationtech.jts.geom.Geometry + +class ExplicitSpatialElevationQuery(constraints: ConstraintsByClass, geometry: Geometry) extends ExplicitSpatialQuery(constraints, geometry) + +object ExplicitSpatialElevationQuery { + def apply(elevation: Double, queryGeometry: Geometry): ExplicitSpatialElevationQuery = + apply(elevation, elevation, queryGeometry) + + def apply(minElevation: Double, maxElevation: Double, queryGeometry: Geometry): ExplicitSpatialElevationQuery = { + val geoConstraints = GeometryUtils.basicConstraintsFromGeometry(queryGeometry) + + val depthConstraints = new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(new NumericRange(minElevation, maxElevation), false), + classOf[ElevationDefinition] + ) + ) + + new ExplicitSpatialElevationQuery(geoConstraints.merge(depthConstraints), queryGeometry) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialTemporalElevationQuery.scala b/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialTemporalElevationQuery.scala new file mode 100644 index 0000000000..5a64aa3194 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/index/query/ExplicitSpatialTemporalElevationQuery.scala @@ -0,0 +1,53 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.index.query + +import java.util.Date + +import geotrellis.geowave.index.dimension.ElevationDefinition +import org.locationtech.geowave.core.geotime.index.dimension.{SimpleTimeDefinition, TimeDefinition} +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.geotime.util.GeometryUtils +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.{ConstraintData, ConstraintSet, ConstraintsByClass} +import org.locationtech.jts.geom.Geometry + +class ExplicitSpatialTemporalElevationQuery(constraints: ConstraintsByClass, geometry: Geometry) extends ExplicitSpatialQuery(constraints, geometry) + +object ExplicitSpatialTemporalElevationQuery { + def apply(depth: Double, startTime: Date, endTime: Date, queryGeometry: Geometry): ExplicitSpatialTemporalElevationQuery = + apply(depth, depth, startTime, endTime, queryGeometry) + + def apply(minElevation: Double, maxElevation: Double, minTime: Date, maxTime: Date, queryGeometry: Geometry): ExplicitSpatialTemporalElevationQuery = { + val geoConstraints = GeometryUtils.basicConstraintsFromGeometry(queryGeometry) + val temporalConstraints = new ConstraintsByClass( + new ConstraintSet( + new ConstraintData(new NumericRange(minTime.getTime.toDouble, maxTime.getTime.toDouble), false), + classOf[TimeDefinition], + classOf[SimpleTimeDefinition] + ) + ) + val depthConstraints = new ConstraintsByClass( + new ConstraintSet( + new 
ConstraintData(new NumericRange(minElevation, maxElevation), false), + classOf[ElevationDefinition] + ) + ) + + new ExplicitSpatialTemporalElevationQuery(geoConstraints.merge(temporalConstraints).merge(depthConstraints), queryGeometry) + } +} diff --git a/geowave/src/main/scala/geotrellis/geowave/ingest/ConfigureIndex.scala b/geowave/src/main/scala/geotrellis/geowave/ingest/ConfigureIndex.scala new file mode 100644 index 0000000000..f347dc0cb7 --- /dev/null +++ b/geowave/src/main/scala/geotrellis/geowave/ingest/ConfigureIndex.scala @@ -0,0 +1,49 @@ +/* + * Copyright 2020 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
object ConfigureIndex {
  private final val logger = org.slf4j.LoggerFactory.getLogger(this.getClass)

  /**
   * Loads the [[GeoTrellisDataAdapter]] for the requested data type, validates
   * that every requested index definition is compatible with that adapter,
   * registers the adapter with all valid indices in the data store, and
   * returns the indices.
   *
   * @param params data type, type name, index definitions and target data store
   * @return the list of validated, persisted indices
   * @throws IllegalArgumentException if any requested index is incompatible
   *         with the adapter; all incompatibility messages are joined with ";"
   */
  def apply(params: IndexParameters): List[Index] = {
    val adapter = GeoTrellisDataAdapter.load(params.dataType, params.typeName)
    val validated = params.indices.map { indexDefinition =>
      val index = indexDefinition.getIndex
      adapter.validateIndexCompatability(index)
        .leftMap { msg =>
          // Fixed typo in the user-facing error message ("incompatable").
          s"'${indexDefinition.indexType}' index type is incompatible with '${adapter.getTypeName}' adapter: $msg"
        }
    }
    // Collect every failure before throwing so the caller sees all problems at once.
    val errors = validated.collect { case Invalid(e) => e }
    if (errors.nonEmpty) throw new IllegalArgumentException(errors.mkString("; "))
    val validIndices = validated.collect { case Valid(i) => i }

    // Persist the adapter together with its indices; the type name is now
    // valid for the adapter type plus this list of indices.
    params.dataStore.addType(adapter, validIndices: _*)
    logger.info(s"Created index: ${validIndices.map(_.getName)}")
    validIndices
  }

  /** Effect-suspended variant of [[apply]]. */
  def applyF[F[_]: Sync](params: IndexParameters): F[List[Index]] = Sync[F].delay(apply(params))
}
object ExecuteQuery {
  /**
   * Executes the delete described by `queryConfiguration` against its data
   * store and returns the query that was run.
   */
  def delete(queryConfiguration: QueryConfiguration): Query[_] = {
    val q = queryConfiguration.query
    queryConfiguration.dataStore.delete(q)
    q
  }

  /** Suspends [[delete]] in an effect `F` so it runs when the effect is evaluated. */
  def syncDelete[F[_]: Sync](queryConfiguration: QueryConfiguration): F[Query[_]] =
    Sync[F].delay(delete(queryConfiguration))
}
/**
 * Reads a GeoTiff from a URI, splits it into chunks according to the supplied
 * [[TilingBounds]], and writes each chunk through the writer registered for
 * the target type name.
 */
object IngestGeoTiff {
  private val logger = LoggerFactory.getLogger(this.getClass())

  /** Synchronous entry point: runs the ingest in `IO` and blocks for the results. */
  def apply(params: IngestParameters[TilingBounds]): List[WriteResults] =
    sync[IO](params).unsafeRunSync()

  /**
   * Effectful ingest: reads chunks lazily via [[dataTypeReader]], then writes
   * them all with the writer resolved from `params.typeName`.
   *
   * @throws RuntimeException (inside `F`) when no writer exists for the type name
   */
  def sync[F[_]: Sync](
    params: IngestParameters[TilingBounds]
  ): F[List[WriteResults]] = {
    // NOTE(review): this log line runs eagerly when the F value is *built*,
    // not when it is executed — confirm this is intended.
    logger.info(s"Reading: ${params.uri} as ${params.dataType} with ${params.options} options")
    val reader = dataTypeReader[F]
    val values = reader.read(params.uri, params.options)

    values.map { chunks =>
      val writer = params.writer[GeoTiff[MultibandTile]]
      if (writer == null) throw new RuntimeException(s"No writer for ${params.typeName}")

      // Write every chunk; close the writer even when a write fails.
      try {
        val results = chunks.map(writer.write).toList
        logger.info(s"Wrote: ${params.uri} to ${params.typeName}")
        results
      }
      finally writer.close()
    }
  }

  /**
   * Reader that first inspects the GeoTiff's metadata, splits the 3D voxel
   * bounds by the (optional) tiling bounds, then reads each sub-bounds as a
   * separate [[GeoTiff]] and flattens the pieces into one iterator.
   */
  def dataTypeReader[F[_]: Sync]: DataTypeReader[F, TilingBounds, GeoTiff[MultibandTile]] =
    new DataTypeReader[F, TilingBounds, GeoTiff[MultibandTile]] {
      def read(uri: URI, options: Option[TilingBounds]): F[Iterator[GeoTiff[MultibandTile]]] =
        metadata(uri) >>= {
          _
            .split(options.getOrElse(TilingBounds()))
            .map { md => readVoxelBounds(uri, md.bounds) }
            .toList
            .sequence
            .map(_.iterator.flatten)
        }

      // Reads one 3D sub-bounds: the XY extent via GridBounds, the Z range
      // (depthMin..depthMax) as the band selection.
      private
      def readVoxelBounds(uri: URI, bounds: VoxelBounds3D): F[Iterator[GeoTiff[MultibandTile]]] = Sync[F].delay {
        val rs = GeoTiffRasterSource(uri.toString)
        rs
          .read(bounds.toGridBounds.toGridType[Long], bounds.depthMin to bounds.depthMax)
          .map(MultibandGeoTiff(_, rs.crs, rs.metadata.tags))
          .iterator
      }

      // Inspects the raster source to build the full voxel bounds
      // (cols x rows x bandCount), extent and GeoTiff metadata.
      private
      def metadata(uri: URI): F[IngestGeoTiffMetadata] = Sync[F].delay {
        val rs = GeoTiffRasterSource(uri.toString)

        IngestGeoTiffMetadata(
          VoxelDimensions3D(rs.cols.toInt, rs.rows.toInt, rs.bandCount).toVoxelBounds,
          rs.extent,
          rs.metadata
        )
      }
    }
}
/**
 * Metadata describing a GeoTiff to ingest: its 3D voxel bounds, geographic
 * extent and GeoTiff-level metadata. Supports splitting into tile-sized
 * sub-metadata for chunked ingest.
 */
case class IngestGeoTiffMetadata(
  bounds: VoxelBounds3D,
  extent: Extent,
  metadata: GeoTiffMetadata
) extends Metadata {
  type VoxelBoundsInternal = VoxelBounds3D

  /** Splits by tiling bounds using the default split options. */
  def split(tb: TilingBounds): Seq[IngestGeoTiffMetadata] = split(bounds.toVoxelDimensions.withTilingBounds(tb))
  /** Splits by tiling bounds with explicit split options. */
  def split(tb: TilingBounds, options: Split.Options): Seq[IngestGeoTiffMetadata] = split(bounds.toVoxelDimensions.withTilingBounds(tb), options)
  /** Splits by explicit voxel dimensions using the default split options. */
  def split(dims: VoxelDimensions3D): Seq[IngestGeoTiffMetadata] = split(dims, Split.Options.DEFAULT)
  /**
   * Splits the voxel bounds into sub-bounds and derives one metadata record per
   * piece, recomputing the extent for every piece whose 2D grid footprint
   * differs from the parent's.
   */
  def split(dims: VoxelDimensions3D, options: Split.Options): Seq[IngestGeoTiffMetadata] = {
    val transform  = mapKeyTransform
    val parentGrid = this.bounds.toGridBounds
    bounds.split(dims, options).map { piece =>
      val pieceGrid = piece.toGridBounds
      if (pieceGrid == parentGrid) copy(bounds = piece)
      else copy(bounds = piece, extent = transform.boundsToExtent(pieceGrid))
    }
  }
}
package geotrellis

// Root package object for the GeoWave adapter module: mixing in Implicits
// makes its enrichments available to code in (or importing) geotrellis.geowave.
package object geowave extends Implicits
/** Helpers for serializing Doubles to/from big-endian byte arrays and buffers. */
object DoubleUtils {
  /** Size in bytes of an encoded Double. */
  val doubleByteLength: Int = 8

  /** Encodes `value` as 8 bytes in the ByteBuffer default (big-endian) order. */
  def toBinary(value: Double): Array[Byte] = {
    val buffer = ByteBuffer.allocate(doubleByteLength)
    buffer.putDouble(value)
    buffer.array()
  }

  /** Decodes a Double from the first 8 bytes of `bytes`. */
  def fromBinary(bytes: Array[Byte]): Double = ByteBuffer.wrap(bytes).getDouble()

  /** Writes `value` at the buffer's current position, advancing it by 8. */
  def writeDouble(value: Double, buffer: ByteBuffer): Unit = buffer.putDouble(value)

  /** Reads a Double at the buffer's current position, advancing it by 8. */
  def readDouble(buffer: ByteBuffer): Double = buffer.getDouble
}
object GeodesicUtils {
  /**
   * Solves the great-circle "direct" problem on a spherical Earth: the point
   * reached by travelling `distance` from `originLatLng` along the initial
   * bearing `bearingAngle`.
   *
   * @param originLatLng start point, X = longitude, Y = latitude, in degrees
   * @param distance     distance to travel along the great circle
   * @param bearingAngle initial bearing (clockwise from north)
   * @return destination point in degrees (lng, lat)
   */
  def pointFromOriginDistanceBearing(originLatLng: Point, distance: Length, bearingAngle: Angle): Point = {
    val earthRadius = 6371000 // mean Earth radius, meters

    val bearingRadians = bearingAngle.toRadians
    val latRadians = math.toRadians(originLatLng.getY)
    val lngRadians = math.toRadians(originLatLng.getX)
    val dMeters = distance.toMeters
    val angularDistance = dMeters / earthRadius

    // phi2 = asin( sin(phi1)*cos(d/R) + cos(phi1)*sin(d/R)*cos(theta) )
    val latRadians2 =
      math.asin(math.sin(latRadians) * math.cos(angularDistance) +
        math.cos(latRadians) * math.sin(angularDistance) * math.cos(bearingRadians))
    // lambda2 = lambda1 + atan2( sin(theta)*sin(d/R)*cos(phi1), cos(d/R) - sin(phi1)*sin(phi2) )
    // Bug fix: the denominator term must be sin(phi1)*sin(phi2) — the original
    // code used sin(latRadians)*sin(latRadians), i.e. the origin latitude twice.
    val lngRadians2 =
      lngRadians + math.atan2(
        math.sin(bearingRadians) * math.sin(angularDistance) * math.cos(latRadians),
        math.cos(angularDistance) - math.sin(latRadians) * math.sin(latRadians2))

    Point(math.toDegrees(lngRadians2), math.toDegrees(latRadians2))
  }
}
/** Binary (de)serialization of string lists as [varint length][utf-8 bytes] records. */
object ListUtils {
  /** Serializes `list`; an empty list encodes to an empty array. */
  def stringsToBinary(list: List[String]): Array[Byte] =
    if (list.isEmpty) Array.empty[Byte]
    else {
      // Encode each string once so lengths are known before allocating.
      val encoded: Seq[(Int, Array[Byte])] = list.map { s =>
        val bytes = StringUtils.stringToBinary(s)
        (bytes.length, bytes)
      }

      val totalBytes = encoded.map { case (len, _) => VarintUtils.unsignedIntByteLength(len) + len }.sum
      val buffer = ByteBuffer.allocate(totalBytes)
      encoded.foreach { case (len, bytes) =>
        VarintUtils.writeUnsignedInt(len, buffer)
        buffer.put(bytes)
      }
      buffer.array
    }

  /** Inverse of [[stringsToBinary]]; an empty array decodes to Nil. */
  def stringsFromBinary(bytes: Array[Byte]): List[String] =
    if (bytes.isEmpty) Nil
    else {
      val buffer = ByteBuffer.wrap(bytes)
      val out = ListBuffer.empty[String]

      while (buffer.hasRemaining) {
        val len = VarintUtils.readUnsignedInt(buffer)
        out += StringUtils.stringFromBinary(ByteArrayUtils.safeRead(buffer, len))
      }

      out.toList
    }
}
package object utils {
  /**
   * Syntax for turning a string map into a CLI-flag-style list:
   * Map("user" -> "root") becomes List("--user", "root").
   */
  implicit class MapStringOps(val map: Map[String, String]) {
    def prefixedList: List[String] =
      map.toList.flatMap(kv => List("--" + kv._1, kv._2))
  }
}
- */ - -package geotrellis.spark.store.geowave - -import geotrellis.vector.Extent -import geotrellis.geotools._ -import geotrellis.proj4.LatLng -import geotrellis.raster._ -import geotrellis.layer._ -import geotrellis.store._ -import geotrellis.store.accumulo.AccumuloAttributeStore -import geotrellis.spark._ -import geotrellis.spark.store._ -import geotrellis.util._ -import geotrellis.util.annotations.experimental - -import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.core.geotime.store.statistics.BoundingBoxDataStatistics -import mil.nga.giat.geowave.core.index.ByteArrayId -import mil.nga.giat.geowave.core.index.HierarchicalNumericIndexStrategy -import mil.nga.giat.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.store.index.{CustomIdIndex, PrimaryIndex} -import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions -import mil.nga.giat.geowave.core.store.query.QueryOptions -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.datastore.accumulo.metadata._ -import mil.nga.giat.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions -import mil.nga.giat.geowave.mapreduce.input.{GeoWaveInputKey, GeoWaveInputFormat} - -import org.apache.accumulo.core.client.security.tokens.PasswordToken -import org.apache.accumulo.core.client.ZooKeeperInstance -import org.log4s._ -import org.locationtech.jts.geom._ -import _root_.io.circe._ - -import scala.collection.JavaConverters._ - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental object GeoWaveAttributeStore { - - /** $experimental */ - @experimental def accumuloRequiredOptions( - zookeepers: String, - accumuloInstance: String, - accumuloUser: String, - accumuloPass: String, - geowaveNamespace: String - ): AccumuloRequiredOptions = { - val aro = new 
AccumuloRequiredOptions - aro.setZookeeper(zookeepers) - aro.setInstance(accumuloInstance) - aro.setUser(accumuloUser) - aro.setPassword(accumuloPass) - aro.setGeowaveNamespace(geowaveNamespace) - aro - } - - /** $experimental */ - @experimental def basicOperations( - zookeepers: String, - accumuloInstance: String, - accumuloUser: String, - accumuloPass: String, - geowaveNamespace: String - ): BasicAccumuloOperations = { - return new BasicAccumuloOperations( - zookeepers, - accumuloInstance, - accumuloUser, - accumuloPass, - geowaveNamespace) - } - - /** $experimental */ - @experimental def adapters(bao: BasicAccumuloOperations): Array[RasterDataAdapter] = { - val adapters = new AccumuloAdapterStore(bao).getAdapters - val retval = adapters.asScala - .map(_.asInstanceOf[RasterDataAdapter]) - .toArray - - adapters.close ; retval - } - - /** $experimental */ - @experimental def primaryIndex = (new SpatialDimensionalityTypeProvider.SpatialIndexBuilder).createIndex() - - /** $experimental */ - @experimental def subStrategies(idx: PrimaryIndex) = idx - .getIndexStrategy - .asInstanceOf[HierarchicalNumericIndexStrategy] - .getSubStrategies - -} - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental class GeoWaveAttributeStore( - val zookeepers: String, - val accumuloInstance: String, - val accumuloUser: String, - val accumuloPass: String, - val geowaveNamespace: String -) extends DiscreteLayerAttributeStore { - @transient private[this] lazy val logger = getLogger - - val zkInstance = (new ZooKeeperInstance(accumuloInstance, zookeepers)) - val token = new PasswordToken(accumuloPass) - val connector = zkInstance.getConnector(accumuloUser, token) - val delegate = AccumuloAttributeStore(connector, s"${geowaveNamespace}_ATTR") - - val basicAccumuloOperations = GeoWaveAttributeStore.basicOperations( - zookeepers, - accumuloInstance, - accumuloUser, - accumuloPass, - geowaveNamespace: String - ) - val accumuloRequiredOptions = 
GeoWaveAttributeStore.accumuloRequiredOptions( - zookeepers, - accumuloInstance, - accumuloUser, - accumuloPass, - geowaveNamespace - ) - val dataStore = new AccumuloDataStore(basicAccumuloOperations) - val dataStatisticsStore = new AccumuloDataStatisticsStore(basicAccumuloOperations) - - /** $experimental */ - @experimental def delete(tableName: String) = - connector.tableOperations.delete(tableName) - - /** $experimental */ - @experimental def boundingBoxes(): Map[ByteArrayId, BoundingBoxDataStatistics[Any]] = { - adapters.map({ adapter => - val adapterId = adapter.getAdapterId - val bboxId = BoundingBoxDataStatistics.STATS_ID - val bbox = dataStatisticsStore - .getDataStatistics(adapterId, bboxId) - .asInstanceOf[BoundingBoxDataStatistics[Any]] - - (adapterId, bbox) - }).toMap - } - - /** $experimental */ - @experimental def leastZooms(): Map[ByteArrayId, Int] = { - adapters.map({ adapter => - val adapterId = adapter.getAdapterId - val bbox = boundingBoxes.getOrElse(adapterId, throw new Exception(s"Unknown Adapter Id $adapterId")) - val zoom = bbox match { - case null => { - logger.warn(s"$adapterId has a broken bounding box") - 0 - } - case _ => { - val substrats = subStrategies - val width = bbox.getMaxX - bbox.getMinX - val height = bbox.getMaxY - bbox.getMinY - val zoom = (0 to subStrategies.length).toIterator.filter({ i => - val substrat = substrats(i) - val ranges = substrat.getIndexStrategy.getHighestPrecisionIdRangePerDimension - ((ranges(0) <= width) && (ranges(1) <= height)) - }).next - - zoom - } - } - - (adapterId, zoom) - }).toMap - } - - /** $experimental */ - @experimental def primaryIndex = GeoWaveAttributeStore.primaryIndex - - /** $experimental */ - @experimental def adapters = GeoWaveAttributeStore.adapters(basicAccumuloOperations) - - /** $experimental */ - @experimental def subStrategies = GeoWaveAttributeStore.subStrategies(primaryIndex) - - /** $experimental */ - @experimental def delete(layerId: LayerId, attributeName: String): Unit = - 
delegate.delete(layerId, attributeName) - - /** $experimental */ - @experimental def delete(layerId: LayerId): Unit = delegate.delete(layerId) - - /** $experimental */ - @experimental def readAll[T: Decoder](attributeName: String): Map[LayerId, T] = - delegate.readAll[T](attributeName) - - /** $experimental */ - @experimental def read[T: Decoder](layerId: LayerId, attributeName: String): T = - delegate.read[T](layerId, attributeName) - - /** $experimental */ - @experimental def write[T: Encoder](layerId: LayerId, attributeName: String, value: T): Unit = - delegate.write[T](layerId, attributeName, value) - - /** $experimental */ - @experimental def availableAttributes(layerId: LayerId) = - delegate.availableAttributes(layerId) - - /** - * Use GeoWave to see whether a layer really exists. - */ - @experimental private def gwLayerExists(layerId: LayerId): Boolean = { - val LayerId(name, zoom) = layerId - val candidateAdapters = adapters.filter(_.getCoverageName == name) - - if (candidateAdapters.nonEmpty) { - val adapterId = candidateAdapters.head.getAdapterId - val leastZoom = leastZooms.getOrElse( - adapterId, - throw new Exception(s"Unknown Adapter Id $adapterId") - ) - - ((leastZoom <= zoom) && (zoom < subStrategies.length)) - } else false - } - - /** - * $experimental Answer whether a layer exists (either in GeoWave - * or only in the AttributeStore). - */ - @experimental def layerExists(layerId: LayerId): Boolean = - gwLayerExists(layerId) || delegate.layerExists(layerId) - - /** - * Use GeoWave to get a list of actual LayerIds. 
- */ - @experimental private def gwLayerIds: Seq[LayerId] = { - val list = - for ( - adapter <- adapters; - zoom <- { - val adapterId = adapter.getAdapterId - val leastZoom = leastZooms.getOrElse( - adapter.getAdapterId, - throw new Exception(s"Unknown Adapter Id $adapterId") - ) - - (leastZoom until subStrategies.length) - } - ) yield LayerId(adapter.getCoverageName, zoom) - - list.distinct - } - - /** - * $experimental Return a complete list of LayerIds (those - * associated with actual GeoWave layers, as well as those only - * recorded in the AttributeStore). - */ - @experimental def layerIds: Seq[LayerId] = - (gwLayerIds ++ delegate.layerIds).distinct -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReader.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReader.scala deleted file mode 100644 index 767e267b6c..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReader.scala +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package geotrellis.spark.store.geowave - -import geotrellis.geotools._ -import geotrellis.spark._ -import geotrellis.store._ -import geotrellis.store.avro._ -import geotrellis.store.avro.codecs._ -import geotrellis.spark.store.hadoop.formats._ -import geotrellis.spark.util.KryoWrapper -import geotrellis.util.annotations._ -import geotrellis.vector._ - -import org.apache.avro.Schema -import org.apache.hadoop.io._ -import org.apache.hadoop.fs.Path -import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat -import org.apache.hadoop.mapreduce.Job -import org.apache.spark.SparkContext -import org.apache.spark.rdd.RDD -import org.opengis.feature.simple._ -import mil.nga.giat.geowave.datastore.accumulo.operations.config._ -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.core.store.query._ -import mil.nga.giat.geowave.mapreduce.input._ -import mil.nga.giat.geowave.core.store.spi._ -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.datastore.accumulo.operations.config._ -import mil.nga.giat.geowave.adapter.vector._ -import mil.nga.giat.geowave.core.store.operations.remote.options._ - -import scala.reflect._ - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental object GeoWaveFeatureRDDReader { - - /** - * $experimental Read out an RDD of Vector features from an - * accumulo geowave store - * - * @param zookeepers zookeeper master node location - * @param accumuloInstanceName name of the accumulo instance to connect to - * @param accumuloInstanceUser user under whose authority accumulo actions should be carried out - * @param accumuloInstancePass password matching the provided user - * @param gwNamespace the geowave namespace for this data - * @param simpleFeatureType the GeoTools [[SimpleFeature]] specification - * @param query the geowave query to use in reading from accumulo - * @param queryLimit the maximum number of returned 
results (0 is infinite) - * - * @tparam G the type of geometry to be retrieved through geowave (REQUIRED) - * @note If the above type parameter is not supplied, errors WILL be thrown - */ - @experimental def read[G <: Geometry : ClassTag]( - zookeepers: String, - accumuloInstanceName: String, - accumuloInstanceUser: String, - accumuloInstancePass: String, - gwNamespace: String, - simpleFeatureType: SimpleFeatureType, - query: DistributableQuery = new BasicQuery(new BasicQuery.Constraints()), - queryLimit: Int = 0 - )(implicit sc: SparkContext): RDD[Feature[G, Map[String, Object]]] = { - val hadoopConf = sc.hadoopConfiguration - val jobConf = Job.getInstance(hadoopConf).getConfiguration - - val additionalAccumuloOpts = new AccumuloOptions - additionalAccumuloOpts.setUseAltIndex(true) - - val accumuloOpts = new AccumuloRequiredOptions - accumuloOpts.setZookeeper(zookeepers) - accumuloOpts.setInstance(accumuloInstanceName) - accumuloOpts.setUser(accumuloInstanceUser) - accumuloOpts.setPassword(accumuloInstancePass) - accumuloOpts.setGeowaveNamespace(gwNamespace) - accumuloOpts.setAdditionalOptions(additionalAccumuloOpts) - - val pluginOpts = new DataStorePluginOptions - pluginOpts.selectPlugin("accumulo") - pluginOpts.setFactoryOptions(accumuloOpts) - - val gwDataAdapter = new FeatureDataAdapter(simpleFeatureType) - val gw2dIndex = (new SpatialDimensionalityTypeProvider).createPrimaryIndex - val queryOptions = new QueryOptions(gwDataAdapter, gw2dIndex) - queryOptions.setLimit(queryLimit) - - GeoWaveInputFormat.setStoreOptions(jobConf, pluginOpts) - GeoWaveInputFormat.setQuery(jobConf, query) - GeoWaveInputFormat.setQueryOptions(jobConf, queryOptions) - - val simpleFeatureRDD = sc.newAPIHadoopRDD( - jobConf, - classOf[GeoWaveInputFormat[SimpleFeature]], - classOf[GeoWaveInputKey], - classOf[SimpleFeature] - ) - - simpleFeatureRDD.map({ case (gwInputKey, simpleFeature) => - simpleFeature.toFeature[G]() - }) - - } -} diff --git 
a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDWriter.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDWriter.scala deleted file mode 100644 index 249a95ea18..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDWriter.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package geotrellis.spark.store.geowave - -import geotrellis.geotools._ -import geotrellis.spark._ -import geotrellis.store._ -import geotrellis.store.avro._ -import geotrellis.store.avro.codecs._ -import geotrellis.spark.store.hadoop.formats._ -import geotrellis.store.index._ -import geotrellis.spark.util.KryoWrapper -import geotrellis.util.annotations.experimental -import geotrellis.vector._ - -import org.apache.hadoop.io._ -import org.apache.hadoop.mapreduce.Job -import org.apache.spark.rdd.RDD -import org.apache.spark.SparkContext -import org.opengis.feature.simple._ -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.mapreduce.input._ -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.store.spi._ -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.datastore.accumulo.operations.config._ -import mil.nga.giat.geowave.datastore.accumulo.metadata._ -import mil.nga.giat.geowave.datastore.accumulo.index.secondary._ 
-import mil.nga.giat.geowave.adapter.vector._ - -import scala.reflect._ - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental object GeoWaveFeatureRDDWriter { - - /** - * $experimental Read out an RDD of Vector features from an - * accumulo geowave store - * - * @param features an RDD of [[geotrellis.vector.Feature]] objects to be written - * @param zookeepers zookeeper master node location - * @param accumuloInstanceName name of the accumulo instance to connect to - * @param accumuloInstanceUser user under whose authority accumulo actions should be carried out - * @param accumuloInstancePass password matching the provided user - * @param gwNamespace the geowave namespace for this data - * @param simpleFeatureType the GeoTools [[SimpleFeature]] specification which corresponds to - * all supplied features - * - * @tparam G the type of geometry to be retrieved through geowave (REQUIRED) - */ - @experimental def write[G <: Geometry, D]( - features: RDD[Feature[G, D]], - zookeepers: String, - accumuloInstanceName: String, - accumuloInstanceUser: String, - accumuloInstancePass: String, - gwNamespace: String, - simpleFeatureType: SimpleFeatureType - )(implicit transmute: D => Seq[(String, Any)]): Unit = { - implicit val sc = features.sparkContext - val trans = KryoWrapper(transmute) - val kryoFeatureType = KryoWrapper(simpleFeatureType) - features.foreachPartition({ featureIterator => - // Secure the basic operations - val accumuloOperations = - new BasicAccumuloOperations( - zookeepers, - accumuloInstanceName, - accumuloInstanceUser, - accumuloInstancePass, - gwNamespace - ) - - // Generate accumulo options instance - val accumuloOpts = new AccumuloOptions - accumuloOpts.setPersistDataStatistics(true) - - // Initialize geowave datastore - val gwDataStore = - new AccumuloDataStore( - new AccumuloIndexStore(accumuloOperations), - new AccumuloAdapterStore(accumuloOperations), - new AccumuloDataStatisticsStore(accumuloOperations), - new 
AccumuloSecondaryIndexDataStore(accumuloOperations), - new AccumuloAdapterIndexMappingStore(accumuloOperations), - accumuloOperations, - accumuloOpts - ) - - val gwDataAdapter = new FeatureDataAdapter(kryoFeatureType.value) - val gw2dIndex = (new SpatialDimensionalityTypeProvider).createPrimaryIndex - - val writer = - gwDataStore - .createWriter(gwDataAdapter, gw2dIndex) - .asInstanceOf[IndexWriter[SimpleFeature]] - - featureIterator.foreach({ feature => - writer.write(feature.toSimpleFeature()) - }) - writer.close() - }) - } -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerReader.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerReader.scala deleted file mode 100644 index 74e28a668b..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerReader.scala +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package geotrellis.spark.store.geowave - -import geotrellis.proj4.LatLng -import geotrellis.vector.Extent -import geotrellis.geotools._ -import geotrellis.raster._ -import geotrellis.layer._ -import geotrellis.store._ -import geotrellis.store.avro._ -import geotrellis.store.index.KeyIndex -import geotrellis.spark.ContextRDD -import geotrellis.util._ -import geotrellis.util.annotations.experimental - -import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.core.geotime.store.query.IndexOnlySpatialQuery -import mil.nga.giat.geowave.core.index.ByteArrayId -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.store.index.CustomIdIndex -import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions -import mil.nga.giat.geowave.core.store.query.QueryOptions -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.datastore.accumulo.metadata._ -import mil.nga.giat.geowave.mapreduce.input.{GeoWaveInputFormat, GeoWaveInputKey} - -import org.apache.hadoop.mapreduce.Job -import org.apache.spark.rdd.RDD -import org.apache.spark.SparkContext -import org.apache.avro.Schema -import org.apache.hadoop.io.Text -import org.geotools.coverage.grid._ -import org.locationtech.jts.geom._ -import _root_.io.circe._ - -import scala.reflect._ - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -object GeoWaveLayerReader { - private val geometryFactory = new GeometryFactory - private val tileClassTag = classTag[Tile] - private val mbtClassTag = classTag[MultibandTile] - - /** - * $experimental Given a map transform and a keybounds, produce a - * corresponding jts.Geometry. 
- * - * @param mt The map transform - * @param kb The KeyBounds - */ - @experimental def keyBoundsToGeometry(mt: MapKeyTransform, kb: KeyBounds[SpatialKey]) = { - val KeyBounds(minKey, maxKey) = kb - val Extent(lng1, lat1, lng2, lat2) = mt(minKey) - val Extent(lng3, lat3, lng4, lat4) = mt(maxKey) - val lngs = List(lng1, lng2, lng3, lng4) - val lats = List(lat1, lat2, lat3, lat4) - val width = math.abs(lng1 - lng2) - val height = math.abs(lat1 - lat2) - val minLng = lngs.min - val maxLng = lngs.max - val minLat = lats.min - val maxLat = lats.max - val envelope = new Envelope( - minLng + width/3, - maxLng - width/3, - minLat + height/3, - maxLat - height/3) - - geometryFactory.toGeometry(envelope) - } -} - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental class GeoWaveLayerReader(val attributeStore: AttributeStore) - (implicit sc: SparkContext) { - - val defaultNumPartitions = sc.defaultParallelism - - val gas = attributeStore.asInstanceOf[GeoWaveAttributeStore] - - @experimental private def adapters = gas.adapters - @experimental private def basicOperations = gas.basicAccumuloOperations - @experimental private def bboxMap = gas.boundingBoxes - @experimental private def index = gas.primaryIndex - @experimental private def requiredOptions = gas.accumuloRequiredOptions - @experimental private def substrats = gas.subStrategies - - /** - * $experimental Compute the common part of the - * org.apache.hadoop.conf.Configuration associated with this layer. - * This result can be reused by changing the Query and QueryOptions - * as desired. - */ - @experimental def computeConfiguration()(implicit sc: SparkContext) = { - val pluginOptions = new DataStorePluginOptions - pluginOptions.setFactoryOptions(requiredOptions) - val job = Job.getInstance(sc.hadoopConfiguration) - val config = job.getConfiguration - GeoWaveInputFormat.setStoreOptions(config, pluginOptions) - - config - } - - /** - * $experimental Compute the metadata associated with this layer. 
- * - * @param adapter The RasterDataAdapter associated with the chosen layer - * @param ranges The ranges in degrees of longitude and latitude associated with the chosen tier - */ - @experimental def computeSpatialMetadata( - adapter: RasterDataAdapter, - ranges: Array[Double] - ): (TileLayerMetadata[SpatialKey], Int, Int) = { - val adapterId = adapter.getAdapterId - - val metadata = adapter.getMetadata - - val bbox = bboxMap.getOrElse(adapterId, throw new Exception(s"Unknown Adapter Id $adapterId")) - - val minX = bbox.getMinX - val minY = bbox.getMinY - val maxX = bbox.getMaxX - val maxY = bbox.getMaxY - val minCol = (minX / ranges(0)).toInt - val minRow = (minY / ranges(1)).toInt - val maxCol = (maxX / ranges(0)).toInt - val maxRow = (maxY / ranges(1)).toInt - - val extent = Extent( - minCol * ranges(0), - minRow * ranges(1), - (maxCol + 1) * ranges(0), - (maxRow + 1) * ranges(1) - ) - - val layout = { - val tileSize = adapter.getTileSize - val tileLayout = TileLayout(maxCol - minCol + 1, maxRow - minRow + 1, tileSize, tileSize) - LayoutDefinition(extent, tileLayout) - } - - val cellType = metadata.get("cellType") match { - case null => { - val geom = (new GeometryFactory).createPoint(new Coordinate((minX + maxX) / 2.0, (minY + maxY) / 2.0)) - val queryOptions = new QueryOptions(adapter, index) - val query = new IndexOnlySpatialQuery(geom) - val config = computeConfiguration - GeoWaveInputFormat.setQuery(config, query) - GeoWaveInputFormat.setQueryOptions(config, queryOptions) - - val gc = sc.newAPIHadoopRDD( - config, - classOf[GeoWaveInputFormat[GridCoverage2D]], - classOf[GeoWaveInputKey], - classOf[GridCoverage2D]) - .map({ case (_, gc) => gc }) - .collect.head - - GridCoverage2DConverters.getCellType(gc) - } - case s: String => CellType.fromName(s) - } - - val bounds = KeyBounds( - SpatialKey(0, 0), - SpatialKey(maxCol - minCol, maxRow - minRow) - ) - - (TileLayerMetadata(cellType, layout, extent, LatLng, bounds), minCol, maxRow) - } - - /** - * 
$experimental Read particular rasters out of the GeoWave - * database. The particular rasters to read are given by the - * result of running the provided LayerQuery. - * - * @param id The LayerId specifying the name and tier to query - * @param rasterQuery Produces a list of rasters to read - * @param numPartitions The number of Spark partitions to use - * @param filterIndexOnly ? - */ - @experimental def read[ - K <: SpatialKey, - V: TileOrMultibandTile: ClassTag, - M: Decoder: GetComponent[*, Bounds[K]] - ](id: LayerId, rasterQuery: LayerQuery[K, M]) = { - import GeoWaveLayerReader._ - - /* Perform checks */ - if (!attributeStore.layerExists(id)) - throw new LayerNotFoundError(id) - - /* Boilerplate */ - val LayerId(name, tier) = id - val adapter = adapters.filter(_.getCoverageName == name).head - val strategy = substrats(tier) - val ranges = strategy.getIndexStrategy.getHighestPrecisionIdRangePerDimension - val customIndex = new CustomIdIndex(strategy.getIndexStrategy, index.getIndexModel, index.getId) - - /* GeoTrellis metadata */ - val (_md, minCol, maxRow) = computeSpatialMetadata(adapter, ranges) - val md = _md.asInstanceOf[M] - - /* GeoWave Query and Query Options */ - val queryOptions = new QueryOptions(adapter, customIndex) - val query = { - val fn = keyBoundsToGeometry(_md.mapTransform, _: KeyBounds[K]) - val kbs = rasterQuery(md) - - val geom = if (kbs.nonEmpty) { kbs - .map({ kb: KeyBounds[K] => fn(kb) }) - .reduce({ (l, r) => l.union(r) }) - } else { - geometryFactory.createPoint(null.asInstanceOf[Coordinate]) - } - - new IndexOnlySpatialQuery(geom) - } - - /* Construct org.apache.hadoop.conf.Configuration */ - val config = computeConfiguration - GeoWaveInputFormat.setQuery(config, query) - GeoWaveInputFormat.setQueryOptions(config, queryOptions) - - /* Submit query */ - val rdd = - sc.newAPIHadoopRDD( - config, - classOf[GeoWaveInputFormat[GridCoverage2D]], - classOf[GeoWaveInputKey], - classOf[GridCoverage2D]) - .map({ case (_, gc) => - val 
Extent(lng, lat, _, _) = GridCoverage2DConverters.getExtent(gc) - val key = SpatialKey( - (lng / ranges(0)).toInt - minCol, - maxRow - (lat / ranges(1)).toInt - ).asInstanceOf[K] - val value = implicitly[ClassTag[V]] match { - case `tileClassTag` => gc.toTile(0).asInstanceOf[V] - case `mbtClassTag` => gc.toMultibandTile.asInstanceOf[V] - } - (key, value) - }) - - new ContextRDD(rdd, md) - } - - /** $experimental */ - @experimental def read[ - K <: SpatialKey: Boundable, - V: TileOrMultibandTile: ClassTag, - M: Decoder: GetComponent[*, Bounds[K]] - ](id: LayerId): RDD[(K, V)] with Metadata[M] = - read(id, new LayerQuery[K, M]) - - /** $experimental */ - @experimental def query[ - K <: SpatialKey: Boundable, - V: TileOrMultibandTile: ClassTag, - M: Decoder: GetComponent[*, Bounds[K]] - ](layerId: LayerId): BoundLayerQuery[K, M, RDD[(K, V)] with Metadata[M]] = - new BoundLayerQuery(new LayerQuery, read(layerId, _)) -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerWriter.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerWriter.scala deleted file mode 100644 index ee366fc68a..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveLayerWriter.scala +++ /dev/null @@ -1,304 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package geotrellis.spark.store.geowave - -import geotrellis.proj4.LatLng -import geotrellis.vector.Extent -import geotrellis.geotools._ -import geotrellis.raster._ -import geotrellis.layer._ -import geotrellis.store._ -import geotrellis.store.avro._ -import geotrellis.store.index.KeyIndex -import geotrellis.store.accumulo.AccumuloInstance -import geotrellis.spark._ -import geotrellis.spark.store.accumulo.AccumuloWriteStrategy -import geotrellis.util._ -import geotrellis.util.annotations.experimental - -import _root_.io.circe._ - -import org.log4s._ - -import mil.nga.giat.geowave.adapter.raster.adapter.merge.RasterTileRowTransform -import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter -import mil.nga.giat.geowave.core.geotime.index.dimension._ -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.core.index.sfc.SFCDimensionDefinition -import mil.nga.giat.geowave.core.index.sfc.SFCFactory.SFCType -import mil.nga.giat.geowave.core.index.sfc.tiered.TieredSFCIndexFactory -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions -import mil.nga.giat.geowave.core.store.adapter.statistics.StatsCompositionTool -import mil.nga.giat.geowave.core.store.data.VisibilityWriter -import mil.nga.giat.geowave.core.store.util.DataStoreUtils -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.datastore.accumulo.metadata._ -import mil.nga.giat.geowave.datastore.accumulo.operations.config.AccumuloOptions -import mil.nga.giat.geowave.datastore.accumulo.util._ -import mil.nga.giat.geowave.datastore.accumulo.util.AccumuloUtils - -import org.apache.accumulo.core.client.security.tokens.PasswordToken -import org.apache.accumulo.core.data.{Key, Value} -import org.apache.spark.rdd.RDD -import org.apache.spark.SparkContext - -import org.geotools.coverage.grid.GridCoverage2D -import org.geotools.coverage.processing.CoverageProcessor -import 
org.geotools.util.factory.{GeoTools, Hints} - -import org.opengis.coverage.grid.GridCoverage -import org.opengis.parameter.ParameterValueGroup - -import java.io.{DataInputStream, DataOutputStream, File} -import java.util.UUID - -import javax.imageio.ImageIO -import javax.media.jai.{ImageLayout, JAI} - -import scala.collection.JavaConverters._ -import scala.collection.mutable.ListBuffer -import scala.reflect._ - -import resource._ - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental object GeoWaveLayerWriter { - @transient private[this] lazy val logger = getLogger - - /** $experimental */ - @experimental def write[ - K <: SpatialKey: ClassTag, - V: TileOrMultibandTile: ClassTag, - M: Encoder: GetComponent[*, Bounds[K]] - ]( - coverageName: String, - bits: Int, - rdd: RDD[(K, V)] with Metadata[M], - as: GeoWaveAttributeStore, - accumuloWriter: AccumuloWriteStrategy - ): Unit = { - val metadata = rdd.metadata.asInstanceOf[TileLayerMetadata[SpatialKey]] - - val crs = metadata.crs - val mt = metadata.mapTransform - val cellType = metadata.cellType.toString - val specimen = rdd.first - - /* Construct (Multiband|)Tile to GridCoverage2D conversion function */ - val rectify = GeoWaveUtil.rectify(bits)_ - val geotrellisKvToGeotools: ((K, V)) => GridCoverage2D = { - case (k: SpatialKey, _tile: V) => - val Extent(minX, minY, maxX, maxY) = mt(k.asInstanceOf[SpatialKey]).reproject(crs, LatLng) - val extent = Extent(rectify(minX), rectify(minY), rectify(maxX), rectify(maxY)) - - _tile match { - case tile: Tile => - ProjectedRaster(Raster(tile, extent), LatLng).toGridCoverage2D - case tile: MultibandTile => - ProjectedRaster(Raster(tile, extent), LatLng).toGridCoverage2D - } - } - val image = geotrellisKvToGeotools(specimen) - - val pluginOptions = new DataStorePluginOptions - pluginOptions.setFactoryOptions(as.accumuloRequiredOptions) - - val configOptions = pluginOptions.getOptionsAsMap - - val geotrellisKvToGeoWaveKv: Iterable[(K, V)] => Iterable[(Key, 
Value)] = { pairs => - { - val gwMetadata = new java.util.HashMap[String, String](); gwMetadata.put("cellType", cellType) - - /* Produce mosaic from all of the tiles in this partition */ - val sources = new java.util.ArrayList(pairs.map(geotrellisKvToGeotools).asJavaCollection) - val accumuloKvs = ListBuffer[Iterable[(Key, Value)]]() - - /* Objects for writing into GeoWave */ - if (sources.size > 0) { - val processor = CoverageProcessor.getInstance(GeoTools.getDefaultHints()) - val param = processor.getOperation("Mosaic").getParameters() - val hints = new Hints - val imageLayout = new ImageLayout - imageLayout.setTileHeight(256) - imageLayout.setTileWidth(256) - - logger.info(s"partition size = ${sources.size}") - param.parameter("Sources").setValue(sources) - hints.put(JAI.KEY_IMAGE_LAYOUT, imageLayout) - - val index = (new SpatialDimensionalityTypeProvider.SpatialIndexBuilder).createIndex() - - val image = processor.doOperation(param, hints).asInstanceOf[GridCoverage2D] - val adapter = new RasterDataAdapter( - coverageName, - gwMetadata, - image, // image only used for sample and color metadata, not data - 256, false, false, - Array.fill[Array[Double]](image.getNumSampleDimensions)(Array(0.0))) // overriding default merge strategy because geotrellis data is already tiled (non-overlapping) - - for ( - statsAggregator <- managed(new StatsCompositionTool(new DataStoreStatisticsProvider( - adapter, - index, - true), - GeoWaveStoreFinder.createDataStatisticsStore(configOptions))) - ) { - val kvGen = new AccumuloKeyValuePairGenerator[GridCoverage]( - adapter, - index, - statsAggregator, - DataStoreUtils.UNCONSTRAINED_VISIBILITY.asInstanceOf[VisibilityWriter[GridCoverage]]) - - adapter.convertToIndex(index, image).asScala.foreach({ i => - val keyValues = kvGen.constructKeyValuePairs(adapter.getAdapterId.getBytes, i).asScala.toList - val keyValuePairs = keyValues.map({ kv => (kv.getKey, kv.getValue) }) - accumuloKvs += keyValuePairs - }) - } - } - 
accumuloKvs.foldLeft(Iterable[(Key, Value)]())(_ ++ _) - } - } - - val index = (new SpatialDimensionalityTypeProvider.SpatialIndexBuilder).createIndex() - val indexName = index.getId.getString - val tableName = AccumuloUtils.getQualifiedTableName(as.geowaveNamespace, indexName) - - val gwMetadata = new java.util.HashMap[String, String](); gwMetadata.put("cellType", cellType) - val basicOperations = new BasicAccumuloOperations( - as.zookeepers, - as.accumuloInstance, - as.accumuloUser, - as.accumuloPass, - as.geowaveNamespace) - val adapter = new RasterDataAdapter( - coverageName, - gwMetadata, - image, - 256, true, false, - Array.fill[Array[Double]](image.getNumSampleDimensions)(Array(0.0)) - ) - - // make sure adapter gets written - val adapterStore = new AccumuloAdapterStore(basicOperations) - adapterStore.addAdapter(adapter) - - // make sure index gets written - val indexStore = new AccumuloIndexStore(basicOperations) - indexStore.addIndex(index) - - // make sure adapter and index are associated together in the mapping store - val mappingStore = new AccumuloAdapterIndexMappingStore(basicOperations) - mappingStore.addAdapterIndexMapping(new AdapterToIndexMapping(adapter.getAdapterId, Array(index.getId))) - - AccumuloUtils.attachRowMergingIterators( - adapter, - basicOperations, - new AccumuloOptions, - index.getIndexStrategy().getNaturalSplits(), - indexName) - accumuloWriter.write( - rdd - .sortBy({ case (k, v) => SpatialKey.keyToTup(k.asInstanceOf[SpatialKey]) }) - .groupBy({ case (k, _) => k.asInstanceOf[SpatialKey]._1 }) - .map(_._2) - .mapPartitions({ partitions => partitions.map(geotrellisKvToGeoWaveKv) }) - .flatMap(i => i), // first argument to accumuloWriter.write - AccumuloInstance( - as.accumuloInstance, - as.zookeepers, - as.accumuloUser, - new PasswordToken(as.accumuloPass) - ), // second argument - tableName // third argument - ) - - val conn = ConnectorPool.getInstance.getConnector( - as.zookeepers, - as.accumuloInstance, - as.accumuloUser, - 
as.accumuloPass - ) - - // compact - val ops = conn.tableOperations() - ops.compact(tableName, null, null, true, true) - - // detach iterator - val iterators = ops.listIterators(tableName).asScala - iterators.foreach { kv => - if (kv._1.startsWith(RasterTileRowTransform.TRANSFORM_NAME)) { - ops.removeIterator(tableName, kv._1, kv._2) - } - } - } -} - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental class GeoWaveLayerWriter( - val attributeStore: GeoWaveAttributeStore, - val accumuloWriter: AccumuloWriteStrategy -)(implicit sc: SparkContext) { - @transient private[this] lazy val logger = getLogger - - /** $experimental */ - @experimental def write[ - K <: SpatialKey: ClassTag, - V: TileOrMultibandTile: ClassTag, - M: Encoder: GetComponent[*, Bounds[K]] - ](id: LayerId, layer: RDD[(K, V)] with Metadata[M], bits: Int = 0): Unit = - layer.metadata.getComponent[Bounds[K]] match { - case keyBounds: KeyBounds[K] => - _write[K, V, M](id, layer, bits) - case EmptyBounds => - throw new EmptyBoundsError("Cannot write layer with empty bounds.") - } - - /** $experimental */ - @experimental protected def _write[ - K <: SpatialKey: ClassTag, - V: TileOrMultibandTile: ClassTag, - M: Encoder: GetComponent[*, Bounds[K]] - ]( - layerId: LayerId, - rdd: RDD[(K, V)] with Metadata[M], - bits: Int - ): Unit = { - val LayerId(coverageName, tier) = layerId - val specimen = rdd.first - - if (tier != 0) - logger.warn(s"GeoWave has its own notion of levels/tiering, so $tier in $layerId will be ignored") - - if (bits <= 0) - logger.warn("It is highly recommended that you specify a bit precision when writing into GeoWave") - - GeoWaveLayerWriter.write( - coverageName, - (if (bits <= 0) 0; else bits), - rdd, - attributeStore, - accumuloWriter - ) - } - -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveUtil.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveUtil.scala deleted file mode 100644 index 80587fd5fb..0000000000 --- 
a/geowave/src/main/scala/geotrellis/spark/store/geowave/GeoWaveUtil.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package geotrellis.spark.store.geowave - -import geotrellis.util.annotations.experimental - -import scala.math.{ abs, pow, round } - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental object GeoWaveUtil { - - /** - * $experimental If an edge or corner of an extent is very close to a split in - * the GeoWave index (for some given number of bits), then it - * should be snapped to the split to avoid pathological behavior). - */ - @experimental def rectify(bits: Int)(_x: Double) = { - val division = if (bits > 0) pow(2, -bits) ; else 1 - val x = (_x / 360.0) / division - val xPrime = round(x) - - 360 * division * (if (abs(x - xPrime) < 0.000001) xPrime ; else x) - } -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/geowave/SerializablePersistable.scala b/geowave/src/main/scala/geotrellis/spark/store/geowave/SerializablePersistable.scala deleted file mode 100644 index e1f447706e..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/geowave/SerializablePersistable.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package geotrellis.spark.store.geowave - -import geotrellis.util.annotations.experimental - -import mil.nga.giat.geowave.core.index.Persistable -import mil.nga.giat.geowave.core.index.PersistenceUtils -import org.apache.hadoop.io.ObjectWritable -import org.apache.spark.util.Utils - -import java.io.{ObjectInputStream, ObjectOutputStream} - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental class SerializablePersistable[T <: Persistable](@transient var t: T) - extends Serializable { - - /** $experimental */ - @experimental def value: T = t - - /** $experimental */ - @experimental override def toString: String = t.toString - - @experimental private def writeObject(out: ObjectOutputStream): Unit = { - val bytes = PersistenceUtils.toBinary(t) - out.writeInt(bytes.length) - out.write(bytes) - } - - @experimental private def readObject(in: ObjectInputStream): Unit = { - val length = in.readInt() - val bytes = new Array[Byte](length) - in.readFully(bytes) - t=PersistenceUtils.fromBinary(bytes, classOf[Persistable]).asInstanceOf[T] - } -} diff --git a/geowave/src/main/scala/geotrellis/spark/store/kryo/GeoWaveKryoRegistrator.scala b/geowave/src/main/scala/geotrellis/spark/store/kryo/GeoWaveKryoRegistrator.scala deleted file mode 100644 index edc019428d..0000000000 --- a/geowave/src/main/scala/geotrellis/spark/store/kryo/GeoWaveKryoRegistrator.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package geotrellis.spark.store.kryo - -import geotrellis.util.annotations.experimental - -import org.apache.accumulo.core.data.Key -import org.geotools.coverage.grid.GridCoverage2D -import org.geotools.data.DataUtilities -import org.opengis.feature.simple.SimpleFeatureType - -import com.esotericsoftware.kryo.io.{ Input, Output } -import com.esotericsoftware.kryo.Kryo -import com.esotericsoftware.kryo.Serializer - -import de.javakaffee.kryoserializers._ - -import mil.nga.giat.geowave.core.index.{ Persistable, PersistenceUtils } - -import java.io.{ ByteArrayInputStream, ByteArrayOutputStream } -import java.io.{ ObjectInputStream, ObjectOutputStream } - - -/** - * @define experimental EXPERIMENTAL@experimental - */ -@experimental class GeoWaveKryoRegistrator extends KryoRegistrator { - - /** $experimental */ - @experimental override def registerClasses(kryo: Kryo) = { - UnmodifiableCollectionsSerializer.registerSerializers(kryo) - kryo.addDefaultSerializer(classOf[Persistable], new PersistableSerializer()) - kryo.addDefaultSerializer(classOf[GridCoverage2D], new DelegateSerializer[GridCoverage2D]()) - kryo.register(classOf[Key]) - super.registerClasses(kryo) - } - - /** $experimental Default serializer for any GeoWave Persistable object */ - @experimental private class PersistableSerializer extends Serializer[Persistable] { - override def write(kryo: Kryo, output: Output, geowaveObj: Persistable): Unit = { - val bytes = PersistenceUtils.toBinary(geowaveObj) - output.writeInt(bytes.length) - output.writeBytes(bytes) 
- } - - override def read(kryo: Kryo, input: Input, t: Class[Persistable]): Persistable = { - val length = input.readInt() - val bytes = new Array[Byte](length) - input.read(bytes) - - PersistenceUtils.fromBinary(bytes, classOf[Persistable]) - } - } - - /** - * $experimental Serializer for difficult types. This simply - * delegates to Java Serialization. - */ - @experimental private class DelegateSerializer[T] extends Serializer[T] { - override def write(kryo: Kryo, output: Output, x: T): Unit = { - val bs = new ByteArrayOutputStream - val oos = new ObjectOutputStream(bs) - - oos.writeObject(x) - - val bytes = bs.toByteArray - - output.writeInt(bytes.length) - output.writeBytes(bytes) - bs.close ; oos.close - } - - override def read(kryo: Kryo, input: Input, t: Class[T]): T = { - val length = input.readInt - val bytes = new Array[Byte](length) - - input.read(bytes) - - val bs = new ByteArrayInputStream(bytes) - val ois = new ObjectInputStream(bs) - val x = ois.readObject.asInstanceOf[T] - - bs.close ; ois.close - x - } - } - -} diff --git a/geowave/src/test/resources/logback.xml b/geowave/src/test/resources/logback.xml new file mode 100644 index 0000000000..053890d2d2 --- /dev/null +++ b/geowave/src/test/resources/logback.xml @@ -0,0 +1,31 @@ + + + + + + %white(%d{HH:mm:ss.SSS}) %highlight(%-5level) %cyan(%logger{50}) - %msg %n + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/geowave/src/test/resources/raster/all-ones.tif b/geowave/src/test/resources/raster/all-ones.tif new file mode 100644 index 0000000000000000000000000000000000000000..ab2399356444054927d86344538528358dc97529 GIT binary patch literal 7040 zcmebEWzb?^kT}2~z{tSB!oa`)rWhHK*h~z}U~vVYBr}BFunEX!gNg%nFp46vosjf8 zBe7jzY{oD~R~VcB7h^C~FVL1|J_a73T_F3;wKRd*Y(Vz2_Vr*f37}14?Hd`GfMOg# zw%d|MINJ{>#$fO5*SO_{G>motiQxkdiQDEfXI`*4VAv|RBk*+xTrDFg92i(Z0Rm(( zv4D6$F`%VDTHqN6BhVG>Ku3UF$Ee&F?jEdQVN%1OYY7B;1`HLq-X3P;Wnkc8QkWh2 z_9Ac5jTfq+qhK@yMnhmU1V%$(Gz3ONU^E0qLtr!nMnhmU1V%$(Gz3ONV919+ZO!2c 
zn*|w!fI|h~@e=T03L_Gm)NvFUh+fc01fvs@InGFIlEzeo^blhzLT-pL6`?Z3n2OM{ zB@N&)l`jkoLfe+W$5({bfrdT?%lL|oyNjcbZ)!9sIQVjabj*k4p7cO$MGB z7>XtXnCooxQT@okAWV~#Wdx3G5e5b^0qUeOktv8&Cb9;R%0%uUQkf_ZB9)1XAX1qa zcz|u7QyI~zi(cu3yktyL>Vl128W|W`S{WKz85k-U8d(__01?eoo;V+MQl7XHBISu& zAyS@r79!<|&w`{p;D|MY_##Bglh^?&->~_9P^U)ska!o@fDn&CRjkn=xxz=~kv65wplQ>RfkDa+kv63(hGg1w^>GaY*2MmPz{IHsuh&rP I>JguQ0bcM)_y7O^ literal 0 HcmV?d00001 diff --git a/geowave/src/test/resources/reference.conf b/geowave/src/test/resources/reference.conf new file mode 100644 index 0000000000..57da84c4c9 --- /dev/null +++ b/geowave/src/test/resources/reference.conf @@ -0,0 +1,13 @@ +geotrellis.raster.gdal { + acceptable-datasets = ["SOURCE", "WARPED"] + number-of-attempts = 1048576 +} + +geotrellis.geowave.connection.store { + data-store-type = "cassandra" + options = { + "contactPoints": "localhost", + "contactPoints": ${?CASSANDRA_HOST}, + "gwNamespace" : "geotrellis" + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/TestEnvironment.scala b/geowave/src/test/scala/geotrellis/geowave/TestEnvironment.scala new file mode 100644 index 0000000000..9c1733e800 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/TestEnvironment.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave + +import org.scalatest.funspec.AnyFunSpec + +import scala.util.Properties +import org.locationtech.geowave.core.store.api.DataStore +import org.locationtech.geowave.core.store.api.DataStoreFactory +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} + +class TestEnvironment extends AnyFunSpec { + val kafka: String = Properties.envOrElse("KAFKA_HOST", "localhost:9092") + val cassandra: String = Properties.envOrElse("CASSANDRA_HOST", "localhost") + + def getDataStore(name: String): DataStore = { + DataStoreFactory.createDataStore( + new CassandraRequiredOptions(cassandra, name, + new CassandraOptions())) + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterSPISpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterSPISpec.scala new file mode 100644 index 0000000000..1b90467b54 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/GeoTrellisDataAdapterSPISpec.scala @@ -0,0 +1,45 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.adapter.geotiff._ + +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.GeoTiff +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers + +class GeoTrellisDataAdapterSPISpec extends AnyFunSpec with Matchers { + describe("GeoTrellisDataAdapterSPISpec") { + it("should create a GeoTiffAdapter") { + val typeName = "TestGeoTiff".typeName + + val actual = GeoTrellisDataAdapter.load(DataTypeGeoTiff, typeName).asInstanceOf[GeoTiffAdapter] + val expected = new GeoTiffAdapter(typeName) + + actual.getClass shouldBe expected.getClass + actual.getTypeName shouldBe expected.getTypeName + actual.getFieldHandlers.map { _.getFieldName } shouldBe expected.getFieldHandlers.map { _.getFieldName } + actual.toBinary should contain theSameElementsAs expected.toBinary + } + + it("should not create Adapters for non implemented types") { + intercept[RuntimeException] { GeoTrellisDataAdapter.load("NewDataType".dataType, "NewTypeName".typeName) } + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterSpec.scala new file mode 100644 index 0000000000..9df2b5a732 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/GeoTiffAdapterSpec.scala @@ -0,0 +1,153 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.{GeoTiff, MultibandGeoTiff} +import geotrellis.vector.Extent +import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} + +import geotrellis.geowave.TestEnvironment +import org.scalatest.matchers.should.Matchers + +class GeoTiffAdapterSpec extends TestEnvironment with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + describe("GeoTiffAdapterSpec spatial index spec") { + // This describes how to index the data + val index = new SpatialIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new CassandraRequiredOptions(cassandra, "GeoTiffAdapterSpec", new CassandraOptions())) + lazy val tiff = MultibandGeoTiff(uri) + lazy val dataTypeAdapter = new GeoTiffAdapter("GeoTiffAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write spatially indexed GeoTiff") { + val indexWriter: Writer[GeoTiff[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(tiff) finally { + if (indexWriter != null) indexWriter.close() + } + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + 
val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should return nothing querying out of the tiff bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything by the query") { + val 
extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } + + describe("GeoTiffAdapterSpec serialization spec") { + it("should be serialized properly") { + val expected = new GeoTiffAdapter("MultibandGeoTiffDataAdapterSpec".typeName) + val actual = new GeoTiffAdapter() + + actual.fromBinary(expected.toBinary) + + actual.getTypeName shouldBe expected.getTypeName + actual.getFieldHandlers.map { _.getFieldName } shouldBe expected.getFieldHandlers.map { _.getFieldName } + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/IMMultibandGeoTiffAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/IMMultibandGeoTiffAdapterSpec.scala new file mode 100644 index 0000000000..1317f6287e --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/IMMultibandGeoTiffAdapterSpec.scala @@ -0,0 +1,155 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.{GeoTiff, MultibandGeoTiff} +import geotrellis.vector.Extent +import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers + +/** In memory data store is not threadsafe, for these reasons it is ignored, but it is a pretty convenient for local development. */ +class IMGeoTiffAdapterSpec extends AnyFunSpec with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + /** To run this test change "ignore" to "describe" */ + ignore("In Memory GeoTiffAdapterSpec spatial index spec") { + // This describes how to index the data + val index = new SpatialIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions) + lazy val tiff = MultibandGeoTiff(uri) + lazy val dataTypeAdapter = new GeoTiffAdapter("MultibandGeoTiffDataAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write spatially indexed GeoTiff") { + val indexWriter: Writer[GeoTiff[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(tiff) finally { + if (indexWriter != null) indexWriter.close() + } + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + 
.addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should return nothing querying out of bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true 
+ } + + it("should delete everything by the query") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } + + describe("GeoTiffAdapterSpec serialization spec") { + it("should be serialized properly") { + val expected = new GeoTiffAdapter("MultibandGeoTiffDataAdapterSpec".typeName) + val actual = new GeoTiffAdapter() + + actual.fromBinary(expected.toBinary) + + actual.getTypeName shouldBe expected.getTypeName + actual.getFieldHandlers.map { _.getFieldName } shouldBe expected.getFieldHandlers.map { _.getFieldName } + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalElevationGeoTiffAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalElevationGeoTiffAdapterSpec.scala new file mode 100644 index 0000000000..bf125cfe31 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalElevationGeoTiffAdapterSpec.scala @@ -0,0 +1,246 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter.GEOTIFF_TIME_FORMATTER_DEFAULT +import geotrellis.geowave.index.SpatialTemporalElevationIndexBuilder +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.geowave.index.query.ExplicitSpatialTemporalElevationQuery +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.{GeoTiff, MultibandGeoTiff} +import geotrellis.vector.Extent + +import org.locationtech.geowave.core.geotime.store.query.{ExplicitSpatialQuery, ExplicitSpatialTemporalQuery} +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} + +import java.time.ZonedDateTime +import java.util.Date + +import geotrellis.geowave.TestEnvironment +import org.scalatest.matchers.should.Matchers + +class SpatialTemporalElevationGeoTiffAdapterSpec extends TestEnvironment with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + describe("STDGeoTiffAdapterSpec spatial temporal elevation index spec") { + // This describes how to index the data + val index = new SpatialTemporalElevationIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new CassandraRequiredOptions(cassandra, "STDGeoTiffAdapterSpec", new CassandraOptions())) + lazy val tiff = MultibandGeoTiff(uri) + lazy val dataTypeAdapter = new GeoTiffAdapter("STDGeoTiffAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write indexed GeoTiff") { + val indexWriter: Writer[GeoTiff[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(tiff) finally { + if (indexWriter != null) indexWriter.close() + } + 
} + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by time") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by elevation") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val stdQuery = 
ExplicitSpatialTemporalElevationQuery(1, date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(stdQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should return nothing querying out of the spatial tiff bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the temporal bounds") { + val extent = tiff.extent + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should query nothing out of the elevation bounds") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = 
Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val stdQuery = ExplicitSpatialTemporalElevationQuery(100, date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(stdQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the spatialtemporal bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything 
by the query") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val stdQuery = ExplicitSpatialTemporalElevationQuery(1, date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(stdQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalGeoTiffAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalGeoTiffAdapterSpec.scala new file mode 100644 index 0000000000..ff1cbb5d3e --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/geotiff/SpatialTemporalGeoTiffAdapterSpec.scala @@ -0,0 +1,206 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.adapter.geotiff + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter.GEOTIFF_TIME_FORMATTER_DEFAULT +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.{GeoTiff, MultibandGeoTiff} +import geotrellis.vector.Extent +import org.locationtech.geowave.core.geotime.index.api.{SpatialIndexBuilder, SpatialTemporalIndexBuilder} +import org.locationtech.geowave.core.geotime.store.query.{ExplicitSpatialQuery, ExplicitSpatialTemporalQuery} +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} +import java.time.ZonedDateTime +import java.util.Date + +import geotrellis.geowave.TestEnvironment +import geotrellis.geowave.api.SQueryBuilder +import org.scalatest.matchers.should.Matchers + +class SpatialTemporalGeoTiffAdapterSpec extends TestEnvironment with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + describe("SpatialTemporalGeoTiffAdapterSpec spatial temporal index spec") { + // This describes how to index the data + val index = new SpatialTemporalIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new CassandraRequiredOptions(cassandra, "SpatialTemporalGeoTiffAdapterSpec", new CassandraOptions())) + lazy val tiff = MultibandGeoTiff(uri) + lazy val dataTypeAdapter = new GeoTiffAdapter("SpatialTemporalGeoTiffAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write spatially temporal indexed GeoTiff") { + val indexWriter: Writer[GeoTiff[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(tiff) finally { + if (indexWriter != 
null) indexWriter.close() + } + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by time") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should return nothing querying out of the spatial tiff bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2019:11:01 12:00:00" + val date = 
Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the temporal bounds") { + val extent = tiff.extent + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = 
Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything by the query") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialTemporalQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/adapter/raster/IMMultibandRasterAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/raster/IMMultibandRasterAdapterSpec.scala new file mode 100644 index 0000000000..e9306648a1 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/raster/IMMultibandRasterAdapterSpec.scala @@ -0,0 +1,154 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.raster + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.raster.io.geotiff.MultibandGeoTiff +import geotrellis.raster.{MultibandTile, Raster} +import geotrellis.vector.Extent +import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers + +/** The in-memory data store is not threadsafe; for this reason it is ignored, but it is pretty convenient for local development.
*/ +class IMMultibandRasterAdapterSpec extends AnyFunSpec with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + /** To run this test change "ignore" to "describe" */ + ignore("In Memory MultibandRasterAdapterSpec spatial index spec") { + // This describes how to index the data + val index = new SpatialIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions) + lazy val raster = MultibandGeoTiff(uri).raster + lazy val dataTypeAdapter = new MulitbandRasterAdapter("MultibandRasterDataAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write spatially indexed Raster") { + val indexWriter: Writer[Raster[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(raster) finally { + if (indexWriter != null) indexWriter.close() + } + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualRaster = it.next() + assert(actualRaster == raster) + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = raster.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualRaster = it.next() + assert(actualRaster == raster) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the tiff bounds") { + val extent = Extent(0, 0, 1, 1) + 
val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything by the query") { + val extent = raster.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } + + describe("MultibandRasterAdapterSpec serialization spec") { + it("should be serialized properly") { + val expected = new MulitbandRasterAdapter("MultibandRasterDataAdapterSpec".typeName) + val actual = new MulitbandRasterAdapter() + + actual.fromBinary(expected.toBinary) + + actual.getTypeName shouldBe expected.getTypeName + actual.getFieldHandlers.map { _.getFieldName } shouldBe expected.getFieldHandlers.map { _.getFieldName } + } + } +} diff --git 
a/geowave/src/test/scala/geotrellis/geowave/adapter/raster/MultibandRasterAdapterSpec.scala b/geowave/src/test/scala/geotrellis/geowave/adapter/raster/MultibandRasterAdapterSpec.scala new file mode 100644 index 0000000000..3433ff827c --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/adapter/raster/MultibandRasterAdapterSpec.scala @@ -0,0 +1,153 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.adapter.raster + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.raster.io.geotiff.MultibandGeoTiff +import geotrellis.raster.{MultibandTile, Raster} +import geotrellis.raster.testkit.RasterMatchers +import geotrellis.vector.Extent +import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder +import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.store.api.{DataStore, DataStoreFactory, Writer} +import org.locationtech.geowave.datastore.cassandra.config.{CassandraOptions, CassandraRequiredOptions} +import geotrellis.geowave.TestEnvironment +import geotrellis.geowave.api.SQueryBuilder +import org.scalatest.matchers.should.Matchers + +class MultibandRasterAdapterSpec extends TestEnvironment with RasterMatchers with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + 
describe("MultibandRasterAdapterSpec spatial index spec") { + // This describes how to index the data + val index = new SpatialIndexBuilder().createIndex + + // Register our adapter in a datastore + lazy val geowaveDataStore: DataStore = DataStoreFactory.createDataStore(new CassandraRequiredOptions(cassandra, "MultibandRasterAdapterSpec", new CassandraOptions())) + lazy val raster = MultibandGeoTiff(uri).raster + lazy val dataTypeAdapter = new MulitbandRasterAdapter("MultibandRasterAdapterSpec".typeName) + geowaveDataStore.addType(dataTypeAdapter, index) + + it("should write spatially indexed Raster") { + val indexWriter: Writer[Raster[MultibandTile]] = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName) + try indexWriter.write(raster) finally { + if (indexWriter != null) indexWriter.close() + } + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualRaster = it.next() + assertEqual(actualRaster, raster) + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = raster.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualRaster = it.next() + assertEqual(actualRaster, raster) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the tiff bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + 
.addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything by the query") { + val extent = raster.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[Raster[MultibandTile]] + .addTypeName(dataTypeAdapter.getTypeName) + .indexName(index.getName) + .constraints(spatialQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[Raster[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } + + describe("MultibandRasterAdapterSpec serialization spec") { + it("should be serialized properly") { + val expected = new MulitbandRasterAdapter("MultibandRasterAdapterSpec".typeName) + val actual = new MulitbandRasterAdapter() + + actual.fromBinary(expected.toBinary) + + actual.getTypeName shouldBe expected.getTypeName + actual.getFieldHandlers.map { _.getFieldName } shouldBe expected.getFieldHandlers.map { _.getFieldName } + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/dsl/MessagesSpec.scala b/geowave/src/test/scala/geotrellis/geowave/dsl/MessagesSpec.scala new file mode 100644 index 
0000000000..4795d897fd --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/dsl/MessagesSpec.scala @@ -0,0 +1,173 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter.GEOTIFF_TIME_FORMATTER_DEFAULT +import geotrellis.geowave.dsl.json.{JsonValidator, JsonValidatorErrors} + +import cats.syntax.option._ +import io.circe.syntax._ +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.geotime.store.dimension.Time.TimeRange +import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter +import geotrellis.vector._ + +import java.net.URI +import java.time.ZonedDateTime + +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers +import _root_.io.circe.JsonObject +import _root_.io.circe.Codec + +class MessagesSpec extends AnyFunSpec with Matchers { + describe("MessagesSpec") { + + it("should build GeoTiff ingest and index messages") { + val ingestParameters = IngestParameters[TilingBounds]( + typeName = "GeoTiffTest".typeName, + dataType = "GeoTiff".dataType, + uri = new URI("file://path/to/file"), + options = TilingBounds(depth = 1.some).some, + namespace = "GeoTiffTestMessagesSpec".some + ) + + val indexParameters = IndexParameters( + indices = IndexDefinition( + indexName = None, + indexType = 
"spatial-temporal-depth".indexType, + indexOptions = Map("periodTemporal" -> "Year", "maxDepth" -> "100") + ), + // should be used in the ingest message as a name to create an instance of adapter with a proper index + typeName = "GeoTiffTest".typeName, + dataType = "GeoTiff".dataType, + namespace = "GeoTiffTestMessagesSpec".some + ) + ingestParameters.asJson shouldBe JsonValidator.parseUnsafe[IngestParameters[TilingBounds]](ingestParameters.asJson.spaces4).asJson + indexParameters.asJson shouldBe JsonValidator.parseUnsafe[IndexParameters](indexParameters.asJson.spaces4).asJson + } + + it("should not build ingest and index messages for the unsupported type") { + val exception = intercept[JsonValidatorErrors] { + val ingestParameters = IngestParameters[TilingBounds]( + typeName = "NewTest".typeName, + dataType = "New".dataType, + uri = new URI("file://path/to/file"), + options = TilingBounds( + depth = 1.some, + spissitude = 1.some + ).some, + namespace = "NewTestMessagesSpec".some + ) + + val indexParameters = IndexParameters( + indices = IndexDefinition( + indexName = None, + indexType = "spatial-temporal-depth".indexType, + indexOptions = Map("periodTemporal" -> "Year", "maxDepth" -> "100") + ), + // should be used in the ingest message as a name to create an instance of adapter with a proper index + typeName = "NewTest".typeName, + dataType = "New".dataType, + namespace = "NewTestMessagesSpec".some + ) + + ingestParameters.asJson shouldBe JsonValidator.parseUnsafe[IngestParameters[String]](ingestParameters.asJson.spaces4).asJson + indexParameters.asJson shouldBe JsonValidator.parseUnsafe[IndexParameters](indexParameters.asJson.spaces4).asJson + } + + exception.getMessage shouldBe "Invalid DataType: New; available DataTypes: GeoTiff: DownField(dataType)" + } + } + + it("should parse delete message") { + val deleteExpected = """ + |{ + | "typeName" : "DeleteTest", + | "indexName": "TestIndex", + | "geometry" : { + | "type" : "Point", + | "coordinates" : [ + | 1.0,
+ | 1.0 + | ] + | }, + | "namespace" : "testnamesapce", + | "time" : { + | "min" : "1970-01-01T00:00:00Z", + | "max" : "2019-11-01T12:00:00Z" + | }, + | "elevation" : { + | "min" : 0.0, + | "max" : 30000.0 + | }, + | "compareOp" : "INTERSECTS" + |} + |""".stripMargin + + + val date = ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse("2019:11:01 12:00:00")).toInstant + + val deleteParameters = DeleteParameters( + typeName = "DeleteTest".typeName, + indexName = "TestIndex", + geometry = Point(1, 1).some, + namespace = "testnamesapce".some, + time = new TimeRange(0, date.toEpochMilli).some, + elevation = new NumericRange(0d, 30000d).some, + compareOp = BasicQueryFilter.BasicQueryCompareOperation.INTERSECTS + ) + + deleteParameters.asJson shouldBe JsonValidator.parseUnsafe[DeleteParameters](deleteExpected).asJson + } + + it("should not parse incorrect IndexParameters") { + JsonValidator.parse[IndexParameters]("{}") match { + case Right(_) => throw new Exception("This test should fail.") + case Left(e) => e.toList.map(_.getMessage) shouldBe List( + "#: 3 schema violations found", + "#: required key [indices] not found", + "#: required key [typeName] not found", + "#: required key [dataType] not found" + ) + } + } + + it("should not parse incorrect IngestParameters") { + JsonValidator.parse[IngestParameters[String]]("{}") match { + case Right(_) => throw new Exception("This test should fail.") + case Left(e) => e.toList.map(_.getMessage) shouldBe List( + "#: 3 schema violations found", + "#: required key [typeName] not found", + "#: required key [dataType] not found", + "#: required key [uri] not found" + ) + } + } + + it("should not parse incorrect DeleteParameters") { + JsonValidator.parse[DeleteParameters]("{}") match { + case Right(_) => throw new Exception("This test should fail.") + case Left(e) => e.toList.map(_.getMessage) shouldBe List( + "#: 2 schema violations found", + "#: required key [typeName] not found", + "#: required key [indexName] not found" + ) 
+ } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/dsl/VoxelBoundsSpec.scala b/geowave/src/test/scala/geotrellis/geowave/dsl/VoxelBoundsSpec.scala new file mode 100644 index 0000000000..9228279622 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/dsl/VoxelBoundsSpec.scala @@ -0,0 +1,68 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package geotrellis.geowave.dsl + +import cats.syntax.option._ + +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers + +class VoxelBoundsSpec extends AnyFunSpec with Matchers { + describe("VoxelSpec") { + it("should build split VoxelBounds2D") { + val bounds = VoxelDimensions2D(100, 100).toVoxelBounds + val splits = bounds.split(VoxelDimensions2D(10, 10)) + + splits.length shouldBe 10 * 10 + splits.foreach { vb => + (vb.colMax - vb.colMin) shouldBe 10 - 1 + (vb.rowMax - vb.rowMin) shouldBe 10 - 1 + } + } + + it("should build split VoxelBounds3D") { + val bounds = VoxelDimensions3D(100, 100, 100).toVoxelBounds + val splits = bounds.split(VoxelDimensions3D(10, 10, 10)) + + splits.length shouldBe 10 * 10 * 10 + splits.foreach { vb => + (vb.colMax - vb.colMin) shouldBe 10 - 1 + (vb.rowMax - vb.rowMin) shouldBe 10 - 1 + (vb.depthMax - vb.depthMin) shouldBe 10 - 1 + } + } + + it("should build split VoxelBounds4D") { + val bounds = VoxelDimensions4D(100, 100, 100, 100).toVoxelBounds + val splits = bounds.split(VoxelDimensions4D(10, 
10, 10, 10)) + + splits.length shouldBe 10 * 10 * 10 * 10 + splits.foreach { vb => + (vb.colMax - vb.colMin) shouldBe 10 - 1 + (vb.rowMax - vb.rowMin) shouldBe 10 - 1 + (vb.depthMax - vb.depthMin) shouldBe 10 - 1 + (vb.spissitudeMax - vb.spissitudeMin) shouldBe 10 - 1 + } + } + + it("should create valid VoxelDimensions from TilingBounds") { + val tb = TilingBounds(depth = 1.some) + val bounds = VoxelBounds3D(0, 100, 0, 100, 0, 100).toVoxelDimensions + bounds.withTilingBounds(tb) shouldBe VoxelDimensions3D(100, 100, 1) + } + } +} diff --git a/geowave/src/test/scala/geotrellis/geowave/ingest/IngestGeoTiffSpec.scala b/geowave/src/test/scala/geotrellis/geowave/ingest/IngestGeoTiffSpec.scala new file mode 100644 index 0000000000..4a898c15e9 --- /dev/null +++ b/geowave/src/test/scala/geotrellis/geowave/ingest/IngestGeoTiffSpec.scala @@ -0,0 +1,303 @@ +/* + * Copyright 2021 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.geowave.ingest + +import geotrellis.geowave.dsl._ +import geotrellis.geowave.dsl.syntax._ +import geotrellis.geowave.adapter.geotiff._ +import geotrellis.geowave.adapter.geotiff.GeoTiffAdapter.GEOTIFF_TIME_FORMATTER_DEFAULT +import geotrellis.geowave.api.SQueryBuilder +import geotrellis.geowave.index.query.ExplicitSpatialTemporalElevationQuery +import geotrellis.raster.MultibandTile +import geotrellis.raster.io.geotiff.GeoTiff + +import org.locationtech.geowave.core.geotime.store.query.{ExplicitSpatialQuery, ExplicitSpatialTemporalQuery} +import org.locationtech.geowave.core.store.CloseableIterator +import org.locationtech.geowave.core.index.sfc.data.NumericRange +import org.locationtech.geowave.core.geotime.store.dimension.Time.TimeRange +import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter +import cats.syntax.option._ +import geotrellis.vector._ +import cats.Id + +import java.net.URI +import java.time.ZonedDateTime +import java.util.Date + +import geotrellis.geowave.TestEnvironment +import org.scalatest.matchers.should.Matchers +import cats.effect.IO + +class IngestGeoTiffSpec extends TestEnvironment with Matchers { + lazy val uri: String = "src/test/resources/raster/all-ones.tif" + + val ingestParameters = IngestParameters[TilingBounds]( + typeName = "IngestMultibandGeoTiffSpec".typeName, + dataType = "GeoTiff".dataType, + uri = new URI(uri), + options = TilingBounds(depth = 1.some).some, + namespace = "IngestMultibandGeoTiffSpec".some + ) + + val indexParameters = IndexParameters( + indices = IndexDefinition( + indexName = "IngestMultibandGeoTiffSpec".some, + indexType = "spatial_temporal_elevation".indexType, + indexOptions = Map() + ), + typeName = "IngestMultibandGeoTiffSpec".typeName, + dataType = "GeoTiff".dataType, + namespace = "IngestMultibandGeoTiffSpec".some + ) + + val date = ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse("2019:11:01 12:00:00")).toInstant + + val deleteParameters = 
DeleteParameters( + typeName = "IngestMultibandGeoTiffSpec".typeName, + indexName = "IngestMultibandGeoTiffSpec", + geometry = + """ + |{"type":"Polygon","coordinates":[[[141.7066666666667,-18.373333333333342],[141.7066666666667,-17.52000000000001],[142.56000000000003,-17.52000000000001],[142.56000000000003,-18.373333333333342],[141.7066666666667,-18.373333333333342]]]} + |""".stripMargin.parseGeoJson[Polygon]().some, + namespace = "IngestMultibandGeoTiffSpec".some, + time = new TimeRange(0, date.toEpochMilli).some, + elevation = new NumericRange(0d, 30000d).some, + compareOp = BasicQueryFilter.BasicQueryCompareOperation.INTERSECTS + ) + + describe("IngestMultibandGeoTiffSpec spatial temporal elevation index spec") { + val index = ConfigureIndex(indexParameters).head + val indexName = indexParameters.indices.flatMap(_.indexName).head + + // default behavior is to read the whole file as a single tile + val iterator = IngestGeoTiff.dataTypeReader[IO].read(new URI(uri), options=None).unsafeRunSync() + val tiff = iterator.next() + lazy val geowaveDataStore = ingestParameters.dataStore + + it("index name should be set as expected") { + indexName shouldBe indexName + } + + it("should write indexed GeoTiff") { + IngestGeoTiff(ingestParameters) + } + + it("should query the entire contents of a store") { + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by extent") { + val extent = tiff.extent + val spatialQuery = new ExplicitSpatialQuery(extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + 
.constraints(spatialQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by time") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should query store by elevation") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val stdQuery = ExplicitSpatialTemporalElevationQuery(1, date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(stdQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + val actualTiff = it.next() + + actualTiff.toByteArray should contain theSameElementsAs tiff.toByteArray + it.hasNext shouldBe false + } + + it("should return nothing querying out of the spatial tiff bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + 
val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the temporal bounds") { + val extent = tiff.extent + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should query nothing out of the elevation bounds") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val stdQuery = ExplicitSpatialTemporalElevationQuery(100, date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(stdQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should return nothing querying out of the spatialtemporal bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + 
SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete nothing by the query out of bounds") { + val extent = Extent(0, 0, 1, 1) + val dateString = "2017:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val dquery = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + geowaveDataStore.delete(dquery.build()) shouldBe true + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe true + } + + it("should delete everything by the query") { + val extent = tiff.extent + val dateString = "2019:11:01 12:00:00" + val date = Date.from(ZonedDateTime.from(GEOTIFF_TIME_FORMATTER_DEFAULT.parse(dateString)).toInstant) + val spatialTemporalQuery = new ExplicitSpatialTemporalQuery(date, date, extent.toPolygon()) + + val query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + .constraints(spatialTemporalQuery) + + geowaveDataStore.delete(query.build()) shouldBe true + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + + it("should delete everything using the DeleteParameters") { + // write it again + IngestGeoTiff(ingestParameters) + // execute the delete query + ExecuteQuery.delete(deleteParameters) + + // should query nothing + val 
query = + SQueryBuilder + .newBuilder[GeoTiff[MultibandTile]] + .addTypeName(indexParameters.typeName) + .indexName(indexName) + + val it: CloseableIterator[GeoTiff[MultibandTile]] = geowaveDataStore.query(query.build()) + it.hasNext shouldBe false + } + } +} diff --git a/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReaderSpec.scala b/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReaderSpec.scala deleted file mode 100644 index c47cb08e5b..0000000000 --- a/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveFeatureRDDReaderSpec.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package geotrellis.spark.store.geowave - -import geotrellis.spark._ -import geotrellis.vector._ - -import org.apache.accumulo.core.client.mock.MockInstance -import org.scalatest._ -import org.scalatest.Matchers._ -import org.locationtech.jts.{geom => jts} -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.core.store.query._ -import mil.nga.giat.geowave.core.geotime.store.query.SpatialQuery -import mil.nga.giat.geowave.core.store.index.PrimaryIndex -import mil.nga.giat.geowave.core.geotime.GeometryUtils -import mil.nga.giat.geowave.datastore.accumulo._ -import mil.nga.giat.geowave.datastore.accumulo.index.secondary.AccumuloSecondaryIndexDataStore -import mil.nga.giat.geowave.datastore.accumulo.metadata._ -import mil.nga.giat.geowave.adapter.vector._ -import mil.nga.giat.geowave.mapreduce.input._ -import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions -import mil.nga.giat.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions -import mil.nga.giat.geowave.datastore.accumulo.operations.config.AccumuloOptions -import org.geotools.feature._ -import org.geotools.feature.simple._ -import org.opengis.feature.simple._ -import org.apache.hadoop.mapreduce.Job -import org.apache.hadoop.conf.Configuration -import org.apache.spark._ -import org.apache.spark.rdd._ - -import scala.util.Properties - -object GeoWaveFeatureRDDReaderSpec { - implicit def id(x: Map[String, Any]) : Seq[(String, Any)] = x.toSeq -} - -/** - * This set of tests depend on a running accumulo + zookeeper instance available on - * port 20000. Obviously, this makes unit testing rather difficult. Compromises - * become a necessity. In this case, we depend on an external process to set the - * stage for testing. In particular (from the root of the GeoTrellis repository) - * `/scripts/runTestDBs` ought to be run prior to this suite's being run. 
- */ -class GeoWaveFeatureRDDReaderSpec - extends FunSpec - with GeoWaveTestEnvironment -{ - - import GeoWaveFeatureRDDReaderSpec.id - - describe("GeoTrellis read/write with GeoWave") { - it("Should roundtrip geowave records in accumulo") { - - // Build simple feature type - val builder = new SimpleFeatureTypeBuilder() - val ab = new AttributeTypeBuilder() - builder.setName("TestType") - builder.add(ab.binding(classOf[jts.Point]).nillable(false).buildDescriptor("geometry")) - - val features = (1 to 100) - .map { x: Int => Feature(Point(x, 40), Map[String, Any]()) } - .toArray - val featureRDD = sc.parallelize(features) - val zookeeper = "localhost:21810" - val instanceName = "instance" - val username = "root" - val password = "password" - val featureType = builder.buildFeatureType() - - GeoWaveFeatureRDDWriter.write( - featureRDD, - zookeeper, - instanceName, - username, - password, - "testpoint", - featureType - ) - - val count: Long = GeoWaveFeatureRDDReader.read[Point]( - zookeeper, - instanceName, - username, - password, - "testpoint", - featureType - ).count() - - count should equal ((1 to 100).size) - - val read = GeoWaveFeatureRDDReader.read[Point]( - zookeeper, - instanceName, - username, - password, - "testpoint", - featureType - ).first() - - read.geom should equal (Point(1, 40)) - } - } -} diff --git a/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveSpatialSpec.scala b/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveSpatialSpec.scala deleted file mode 100644 index b63ccd850f..0000000000 --- a/geowave/src/test/scala/geotrellis/spark/store/geowave/GeoWaveSpatialSpec.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2016 Azavea - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package geotrellis.spark.store.geowave - -import geotrellis.raster.Tile -import geotrellis.layer._ -import geotrellis.store._ -import geotrellis.store.index.ZCurveKeyIndexMethod -import geotrellis.spark._ -import geotrellis.spark.store._ -import geotrellis.spark.store.accumulo.SocketWriteStrategy -import geotrellis.spark.testkit.testfiles.TestFiles -import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter -import mil.nga.giat.geowave.core.geotime.ingest._ -import mil.nga.giat.geowave.core.store._ -import mil.nga.giat.geowave.datastore.accumulo._ -import org.geotools.coverage.grid._ -import org.geotools.gce.geotiff._ -import org.opengis.coverage.grid.GridCoverage -import org.opengis.parameter.GeneralParameterValue -import org.scalatest._ - -class GeoWaveSpatialSpec - extends FunSpec - with Matchers - with BeforeAndAfterAll - with GeoWaveTestEnvironment -{ - - val gwNamespace = "TEST" - - val attributeStore = new GeoWaveAttributeStore( - "leader:21810", - "instance", - "root", - "password", - gwNamespace - ) - - val reader = new GeoWaveLayerReader(attributeStore) - val writer = new GeoWaveLayerWriter(attributeStore, SocketWriteStrategy()) - val coverageName = "Sample Elevation 1" - val id1 = LayerId(coverageName, 11) - val id2 = LayerId("Sample Elevation 2", 11) - - def getGridCoverage2D(filename: String): GridCoverage2D = { - val file = new java.io.File(filename) - val params = Array[GeneralParameterValue]() - - new GeoTiffReader(file).read(params) - } - - def poke(bo: BasicAccumuloOperations, img: GridCoverage2D): Unit = { - val 
metadata = new java.util.HashMap[String, String]() - val dataStore = new AccumuloDataStore(bo) - val index = (new SpatialDimensionalityTypeProvider.SpatialIndexBuilder).createIndex() - val adapter = new RasterDataAdapter(coverageName, metadata, img, 256, true) // img only used for metadata, not data - val indexWriter = dataStore.createWriter(adapter, index).asInstanceOf[IndexWriter[GridCoverage]] - - indexWriter.write(img) - indexWriter.close - } - - def clear(): Unit = { - attributeStore.delete(s"${gwNamespace}_GEOWAVE_METADATA") - attributeStore.delete(s"${gwNamespace}_SPATIAL_IDX") - } - - it("should not find layer before write") { - intercept[LayerNotFoundError] { - reader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](id1) - } - } - - it("should read an existing layer") { - val img = getGridCoverage2D("spark/src/test/resources/elevation.tif") - val bo = attributeStore.basicAccumuloOperations - - poke(bo, img) - - val layer = reader - .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](id1) - .map({ kv => 1 }) - .collect() - - layer.length should be (6) - } - - it("should write a layer") { - val layer = reader.read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](id1) - - writer.write(id2, layer) - } - - it("should read a layer back") { - val original = reader - .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](id1) - .keys.count - val geowave = reader - .read[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](id2) - .keys.count - - original should be (geowave) - } - - it("should clean up after itself") { - clear - } -} diff --git a/layer/src/main/scala/geotrellis/layer/LayoutTileSource.scala b/layer/src/main/scala/geotrellis/layer/LayoutTileSource.scala index 7837674488..12813f4d6e 100644 --- a/layer/src/main/scala/geotrellis/layer/LayoutTileSource.scala +++ b/layer/src/main/scala/geotrellis/layer/LayoutTileSource.scala @@ -144,7 +144,7 @@ class LayoutTileSource[K: SpatialComponent]( /** Read all available tiles */ def readAll(): Iterator[(K, 
MultibandTile)] = - readAll(keys.toIterator) + readAll(keys.iterator) /** Set of keys that can be read from this tile source */ def keys: Set[K] = { @@ -174,7 +174,7 @@ class LayoutTileSource[K: SpatialComponent]( /** All intersecting RasterRegions with their respective keys */ def keyedRasterRegions(): Iterator[(K, RasterRegion)] = keys - .toIterator + .iterator .flatMap { key => val result = rasterRegionForKey(key) result.map { region => (key, region) } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index bdad721d6a..f4a3dfba16 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -17,18 +17,18 @@ import sbt._ object Version { - val geotools = "24.2" + val geotools = "23.2" val spire = "0.17.0" val accumulo = "1.9.3" val cassandra = "3.7.2" val hbase = "2.4.2" val geomesa = "2.3.1" - val geowave = "0.9.3" + val geowave = "1.2.0" val hadoop = "3.2.1" val gdal = "3.1.0" val gdalWarp = "1.1.1" - val previousVersion = "3.5.0" + val previousVersion = "3.5.2" } import sbt.Keys._ @@ -53,7 +53,10 @@ object Dependencies { } def circe(module: String) = Def.setting { - "io.circe" %% s"circe-$module" % "0.13.0" + module match { + case "json-schema" => "io.circe" %% s"circe-$module" % "0.1.0" + case _ => "io.circe" %% s"circe-$module" % "0.13.0" + } } def fs2(module: String) = Def.setting { @@ -71,7 +74,7 @@ object Dependencies { val scalatest = "org.scalatest" %% "scalatest" % "3.2.5" val scalacheck = "org.scalacheck" %% "scalacheck" % "1.15.2" val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.3.0" - val jts = "org.locationtech.jts" % "jts-core" % "1.17.1" + val jts = "org.locationtech.jts" % "jts-core" % "1.16.1" val proj4j = "org.locationtech.proj4j" % "proj4j" % "1.1.2" val openCSV = "com.opencsv" % "opencsv" % "5.3" val spire = "org.typelevel" %% "spire" % Version.spire @@ -87,9 +90,10 @@ object Dependencies { val jsonSchemaValidator = "com.networknt" % "json-schema-validator" % "0.1.23" val accumuloCore = 
"org.apache.accumulo" % "accumulo-core" % Version.accumulo val sl4jnop = "org.slf4j" % "slf4j-nop" % "1.7.25" + val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.2.3" val cassandraDriverCore = "com.datastax.cassandra" % "cassandra-driver-core" % Version.cassandra val guava = "com.google.guava" % "guava" % "16.0.1" - + val geomesaJobs = "org.locationtech.geomesa" %% "geomesa-jobs" % Version.geomesa val geomesaAccumuloJobs = "org.locationtech.geomesa" %% "geomesa-accumulo-jobs" % Version.geomesa val geomesaAccumuloDatastore = "org.locationtech.geomesa" %% "geomesa-accumulo-datastore" % Version.geomesa @@ -106,15 +110,19 @@ object Dependencies { val geotoolsShapefile = "org.geotools" % "gt-shapefile" % Version.geotools val geotoolsMetadata = "org.geotools" % "gt-metadata" % Version.geotools val geotoolsOpengis = "org.geotools" % "gt-opengis" % Version.geotools - + // located in the OSGeo repo: https://repo.osgeo.org/repository/release/ val jaiCore = "javax.media" % "jai_core" % "1.1.3" - val geowaveRaster = "mil.nga.giat" % "geowave-adapter-raster" % Version.geowave - val geowaveVector = "mil.nga.giat" % "geowave-adapter-vector" % Version.geowave - val geowaveStore = "mil.nga.giat" % "geowave-core-store" % Version.geowave - val geowaveGeotime = "mil.nga.giat" % "geowave-core-geotime" % Version.geowave - val geowaveAccumulo = "mil.nga.giat" % "geowave-datastore-accumulo" % Version.geowave + val geowaveRaster = "org.locationtech.geowave" % "geowave-adapter-raster" % Version.geowave + val geowaveVector = "org.locationtech.geowave" % "geowave-adapter-vector" % Version.geowave + val geowaveIndex = "org.locationtech.geowave" % "geowave-core-index" % Version.geowave + val geowaveStore = "org.locationtech.geowave" % "geowave-core-store" % Version.geowave + val geowaveGeotime = "org.locationtech.geowave" % "geowave-core-geotime" % Version.geowave + val geowaveAccumulo = "org.locationtech.geowave" % "geowave-datastore-accumulo" % Version.geowave + val 
geowaveCassandra = "org.locationtech.geowave" % "geowave-datastore-cassandra" % Version.geowave + + val geowaveGuava = "com.google.guava" % "guava" % "25.1-jre" val scalaArm = "com.jsuereth" %% "scala-arm" % "2.0" @@ -150,8 +158,11 @@ object Dependencies { val jacksonModuleScala = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.6.7" val shapeless = "com.chuusai" %% "shapeless" % "2.3.3" - - val unitApi = "javax.measure" % "unit-api" % "2.1.2" + val newtype = "io.estatico" %% "newtype" % "0.4.4" + + // aligned with the GeoTools version, should be 2.1.2 for GeoTools 24.2 + val unitApi = "javax.measure" % "unit-api" % "1.0" val scalaURI = "io.lemonlabs" %% "scala-uri" % "1.5.1" + val java8Compat = "org.scala-lang.modules" %% "scala-java8-compat" % "0.9.1" } diff --git a/project/Settings.scala b/project/Settings.scala index dc5ef7b599..3988f53b93 100644 --- a/project/Settings.scala +++ b/project/Settings.scala @@ -33,11 +33,12 @@ object Settings { val eclipseReleases = "eclipse-releases" at "https://repo.eclipse.org/content/groups/releases" val osgeoReleases = "osgeo-releases" at "https://repo.osgeo.org/repository/release/" val geosolutions = "geosolutions" at "https://maven.geo-solutions.it/" + val jitpack = "jitpack" at "https://jitpack.io" // for https://github.com/everit-org/json-schema val ivy2Local = Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns) val mavenLocal = Resolver.mavenLocal val maven = DefaultMavenRepository val local = Seq(ivy2Local, mavenLocal) - val external = Seq(osgeoReleases, maven, eclipseReleases, geosolutions, apacheSnapshots) + val external = Seq(osgeoReleases, maven, eclipseReleases, geosolutions, jitpack, apacheSnapshots) val all = external ++ local } @@ -93,7 +94,7 @@ object Settings { ).filter(_.asFile.canRead).map(Credentials(_)), addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full), - addCompilerPlugin("org.scalameta" % 
"semanticdb-scalac" % "4.4.10" cross CrossVersion.full), + addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.14" cross CrossVersion.full), libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { case Some((2, 13)) => Nil @@ -303,70 +304,31 @@ object Settings { lazy val geowave = Seq( name := "geotrellis-geowave", libraryDependencies ++= Seq( - accumuloCore - exclude("org.jboss.netty", "netty") - exclude("org.apache.hadoop", "hadoop-client"), - geowaveRaster - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geowaveVector - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geowaveStore - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geowaveGeotime - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geowaveAccumulo - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - hadoopClient % Provided - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geotoolsCoverage % Provided - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geotoolsHsql % Provided - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geotoolsMain % Provided - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - geotoolsReferencing % Provided - excludeAll(ExclusionRule(organization = "org.mortbay.jetty"), - ExclusionRule(organization = "javax.servlet")), - scalaArm, - kryoSerializers exclude("com.esotericsoftware", "kryo"), - kryoShaded, - apacheSpark("core").value % Provided, - apacheSpark("sql").value 
% Test, - scalatest % Test + jaiCore, + newtype, + java8Compat, + circe("generic-extras").value, + circe("json-schema").value, + geowaveStore, + geowaveIndex, + geowaveGeotime exclude("javax.media", "jai_core"), + geowaveGuava % Test, // tracking geowave guava requirement + geowaveCassandra % Test, + scalatest % Test, + logbackClassic % Test ), - assembly / assemblyMergeStrategy := { - case "reference.conf" => MergeStrategy.concat - case "application.conf" => MergeStrategy.concat - case PathList("META-INF", xs@_*) => - xs match { - case ("MANIFEST.MF" :: Nil) => MergeStrategy.discard - // Concatenate everything in the services directory to keep GeoTools happy. - case ("services" :: _ :: Nil) => - MergeStrategy.concat - // Concatenate these to keep JAI happy. - case ("javax.media.jai.registryFile.jai" :: Nil) | ("registryFile.jai" :: Nil) | ("registryFile.jaiext" :: Nil) => - MergeStrategy.concat - case (name :: Nil) => { - // Must exclude META-INF/*.([RD]SA|SF) to avoid "Invalid signature file digest for Manifest main attributes" exception. 
- if (name.endsWith(".RSA") || name.endsWith(".DSA") || name.endsWith(".SF")) - MergeStrategy.discard - else - MergeStrategy.first - } - case _ => MergeStrategy.first - } - case _ => MergeStrategy.first - } - ) ++ commonSettings ++ noForkInTests + Test / fork := true + ) ++ commonSettings + + lazy val geowaveBenchmark = Seq( + name := "geotrellis-geowave-benchmark", + libraryDependencies ++= Seq( + geowaveGuava, // tracking geowave guava requirement + geowaveCassandra, + logbackClassic + ), + Test / fork := true + ) ++ commonSettings lazy val hbase = Seq( name := "geotrellis-hbase", @@ -442,7 +404,7 @@ object Settings { name := "geotrellis-raster", libraryDependencies ++= Seq( squants, - monocle("core").value, + monocle("core").value, monocle("macro").value, scalaXml, scalaURI, @@ -572,7 +534,7 @@ object Settings { lazy val `spark-pipeline` = Seq( name := "geotrellis-spark-pipeline", - libraryDependencies ++= Seq( + libraryDependencies ++= Seq( circe("generic-extras").value, hadoopClient % Provided, apacheSpark("core").value % Provided, @@ -627,8 +589,8 @@ object Settings { jts, shapeless, pureconfig, - circe("core").value, - circe("generic").value, + circe("core").value, + circe("generic").value, circe("parser").value, cats("core").value, apacheMath, @@ -678,14 +640,13 @@ object Settings { name := "geotrellis-store", libraryDependencies ++= Seq( hadoopClient % Provided, - guava, apacheIO, scaffeine, caffeine, uzaygezenCore, scalaXml, apacheLang3, - fs2("core").value, + fs2("core").value, fs2("io").value, cats("effect").value, scalatest % Test diff --git a/project/build.properties b/project/build.properties index e67343ae79..f0be67b9f7 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.1 diff --git a/project/plugins.sbt b/project/plugins.sbt index 84e7868a8c..8f184e7555 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,13 +1,13 @@ resolvers += sbt.Resolver.bintrayIvyRepo("typesafe", 
"sbt-plugins") -addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") +addDependencyTreePlugin addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.2") -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.5.0") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.3") +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.34") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.26") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.18" ) +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.19" ) libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.9.8" diff --git a/raster/src/main/scala/geotrellis/raster/RasterSource.scala b/raster/src/main/scala/geotrellis/raster/RasterSource.scala index 45edf6d351..f40be6959f 100644 --- a/raster/src/main/scala/geotrellis/raster/RasterSource.scala +++ b/raster/src/main/scala/geotrellis/raster/RasterSource.scala @@ -143,24 +143,24 @@ abstract class RasterSource extends CellGrid[Long] with RasterMetadata { * @group read */ def readExtents(extents: Traversable[Extent], bands: Seq[Int]): Iterator[Raster[MultibandTile]] = - extents.toIterator.flatMap(read(_, bands).toIterator) + extents.toIterator.flatMap(read(_, bands).iterator) /** * @group read */ def readExtents(extents: Traversable[Extent]): Iterator[Raster[MultibandTile]] = - readExtents(extents, (0 until bandCount)) + readExtents(extents, 0 until bandCount) /** * @group read */ def readBounds(bounds: Traversable[GridBounds[Long]], bands: Seq[Int]): Iterator[Raster[MultibandTile]] = - bounds.toIterator.flatMap(read(_, bands).toIterator) + 
bounds.toIterator.flatMap(read(_, bands).iterator) /** * @group read */ def readBounds(bounds: Traversable[GridBounds[Long]]): Iterator[Raster[MultibandTile]] = - bounds.toIterator.flatMap(read(_, (0 until bandCount)).toIterator) + bounds.toIterator.flatMap(read(_, 0 until bandCount).iterator) private[raster] def targetCellType: Option[TargetCellType] diff --git a/raster/src/main/scala/geotrellis/raster/crop/CropMethods.scala b/raster/src/main/scala/geotrellis/raster/crop/CropMethods.scala index 67b9444213..be205f6fc8 100644 --- a/raster/src/main/scala/geotrellis/raster/crop/CropMethods.scala +++ b/raster/src/main/scala/geotrellis/raster/crop/CropMethods.scala @@ -41,7 +41,7 @@ trait CropMethods[T] extends MethodExtensions[T] { * Crop out multiple [[GridBounds]] windows. */ def crop(windows: Seq[GridBounds[Int]]): Iterator[(GridBounds[Int], T)] = { - windows.toIterator.map { gb => (gb, crop(gb))} + windows.iterator.map { gb => (gb, crop(gb))} } /** diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala index 0bca5dd955..6cbda67743 100644 --- a/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala +++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala @@ -113,7 +113,7 @@ class LazySegmentBytes( def getSegments(indices: Traversable[Int]): Iterator[(Int, Array[Byte])] = { val chunks = chunkSegments(indices) chunks - .toIterator + .iterator .flatMap(chunk => readChunk(chunk)) } diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/MultibandGeoTiff.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/MultibandGeoTiff.scala index 97c57b2315..4326fb81e7 100644 --- a/raster/src/main/scala/geotrellis/raster/io/geotiff/MultibandGeoTiff.scala +++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/MultibandGeoTiff.scala @@ -120,7 +120,7 @@ case class MultibandGeoTiff( val storageMethod = Tiled(blockSize, 
blockSize) val overviewOptions = options.copy(subfileType = Some(ReducedImage), storageMethod = storageMethod) val overviewTile = GeoTiffBuilder[MultibandTile].makeTile( - segments.toIterator, segmentLayout, cellType, options.compression + segments.iterator, segmentLayout, cellType, options.compression ) MultibandGeoTiff(overviewTile, extent, crs, Tags.empty, overviewOptions) diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/SinglebandGeoTiff.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/SinglebandGeoTiff.scala index 7dc0b31907..98206d269f 100644 --- a/raster/src/main/scala/geotrellis/raster/io/geotiff/SinglebandGeoTiff.scala +++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/SinglebandGeoTiff.scala @@ -114,7 +114,7 @@ case class SinglebandGeoTiff( val storageMethod = Tiled(blockSize, blockSize) val overviewOptions = options.copy(subfileType = Some(ReducedImage), storageMethod = storageMethod) val overviewTile = GeoTiffBuilder[Tile].makeTile( - segments.toIterator, segmentLayout, cellType, options.compression + segments.iterator, segmentLayout, cellType, options.compression ) SinglebandGeoTiff(overviewTile, extent, crs, Tags.empty, overviewOptions) diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/writer/GeoTiffWriter.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/writer/GeoTiffWriter.scala index 31291b2b63..a4e1a365fd 100644 --- a/raster/src/main/scala/geotrellis/raster/io/geotiff/writer/GeoTiffWriter.scala +++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/writer/GeoTiffWriter.scala @@ -82,7 +82,7 @@ class GeoTiffWriter(geoTiff: GeoTiffData, dos: DataOutputStream) { def writeDouble(value: Double): Unit = { writeBytes(toBytes(value)) } private def append(list: List[GeoTiffData]): Unit = { - val overviewsIter = (geoTiff +: geoTiff.overviews).toIterator + val overviewsIter = (geoTiff +: geoTiff.overviews).iterator overviewsIter.foreach(append(_, !overviewsIter.hasNext)) } diff --git 
a/raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilerSpec.scala b/raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilderSpec.scala similarity index 96% rename from raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilerSpec.scala rename to raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilderSpec.scala index 7b25c41c1e..7b7715446c 100644 --- a/raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilerSpec.scala +++ b/raster/src/test/scala/geotrellis/raster/io/geotiff/GeoTiffBuilderSpec.scala @@ -52,7 +52,7 @@ class GeoTiffBuilderSpec extends AnyFunSpec with RasterMatchers with GeoTiffTest BandInterleave, BandType.forCellType(ct)) - val tiff = GeoTiffBuilder[MultibandTile].makeTile(segments.toIterator, segmentLayout, ct, NoCompression) + val tiff = GeoTiffBuilder[MultibandTile].makeTile(segments.iterator, segmentLayout, ct, NoCompression) val actualTile = tiff.tile.toArrayTile() assertEqual(expectedTile, actualTile) @@ -66,7 +66,7 @@ class GeoTiffBuilderSpec extends AnyFunSpec with RasterMatchers with GeoTiffTest PixelInterleave, BandType.forCellType(ct)) - val tiff = GeoTiffBuilder[MultibandTile].makeTile(segments.toIterator, segmentLayout, ct, NoCompression) + val tiff = GeoTiffBuilder[MultibandTile].makeTile(segments.iterator, segmentLayout, ct, NoCompression) val actualTile = tiff.tile.toArrayTile() assertEqual(expectedTile, actualTile) diff --git a/s3-spark/src/main/scala/geotrellis/spark/store/s3/S3RDDReader.scala b/s3-spark/src/main/scala/geotrellis/spark/store/s3/S3RDDReader.scala index 408ab699a0..d315517fab 100644 --- a/s3-spark/src/main/scala/geotrellis/spark/store/s3/S3RDDReader.scala +++ b/s3-spark/src/main/scala/geotrellis/spark/store/s3/S3RDDReader.scala @@ -70,7 +70,7 @@ class S3RDDReader( val s3Client = this.s3Client val writerSchema = kwWriterSchema.value.getOrElse(_recordCodec.schema) partition flatMap { seq => - GTIOUtils.parJoinEBO[K, V](seq.toIterator)({ index: BigInt => + 
GTIOUtils.parJoinEBO[K, V](seq.iterator)({ index: BigInt => try { val request = GetObjectRequest.builder() .bucket(bucket) diff --git a/s3/src/main/scala/geotrellis/store/s3/S3CollectionReader.scala b/s3/src/main/scala/geotrellis/store/s3/S3CollectionReader.scala index fdccc1d394..ffa813a899 100644 --- a/s3/src/main/scala/geotrellis/store/s3/S3CollectionReader.scala +++ b/s3/src/main/scala/geotrellis/store/s3/S3CollectionReader.scala @@ -55,7 +55,7 @@ class S3CollectionReader( val recordCodec = KeyValueRecordCodec[K, V] implicit val ec = executionContext - GTIOUtils.parJoin[K, V](ranges.toIterator){ index: BigInt => + GTIOUtils.parJoin[K, V](ranges.iterator){ index: BigInt => try { val getRequest = GetObjectRequest.builder() .bucket(bucket) diff --git a/sbt b/sbt index d97f8e85d4..1aac2d3f62 100755 --- a/sbt +++ b/sbt @@ -34,8 +34,8 @@ set -o pipefail -declare -r sbt_release_version="1.5.0" -declare -r sbt_unreleased_version="1.5.0" +declare -r sbt_release_version="1.5.1" +declare -r sbt_unreleased_version="1.5.1" declare -r latest_213="2.13.5" declare -r latest_212="2.12.13" diff --git a/spark/src/main/scala/geotrellis/spark/RasterSourceRDD.scala b/spark/src/main/scala/geotrellis/spark/RasterSourceRDD.scala index dd7e5d34e6..0cdcb203f0 100644 --- a/spark/src/main/scala/geotrellis/spark/RasterSourceRDD.scala +++ b/spark/src/main/scala/geotrellis/spark/RasterSourceRDD.scala @@ -306,7 +306,7 @@ object RasterSourceRDD { val m = keyExtractor.getMetadata(source) val tileKeyTransform: SpatialKey => K = { sk => keyExtractor.getKey(m, sk) } val tileSource = source.tileToLayout(layout, tileKeyTransform, NearestNeighbor, strategy) - tileSource.readAll(keys.map(tileKeyTransform).toIterator) + tileSource.readAll(keys.map(tileKeyTransform).iterator) } sourcesRDD.unpersist() diff --git a/spark/src/main/scala/geotrellis/spark/rasterize/RasterizeRDD.scala b/spark/src/main/scala/geotrellis/spark/rasterize/RasterizeRDD.scala index c07a8f6f48..56edf24537 100644 --- 
a/spark/src/main/scala/geotrellis/spark/rasterize/RasterizeRDD.scala +++ b/spark/src/main/scala/geotrellis/spark/rasterize/RasterizeRDD.scala @@ -95,7 +95,7 @@ object RasterizeRDD { // key the geometry to intersecting tiles so it can be rasterized in the map-side combine val keyed: RDD[(SpatialKey, (Feature[Geometry, Double], SpatialKey))] = features.flatMap { feature => - layout.mapTransform.keysForGeometry(feature.geom).toIterator + layout.mapTransform.keysForGeometry(feature.geom).iterator .map(key => (key, (feature, key)) ) } @@ -192,7 +192,7 @@ object RasterizeRDD { // key the geometry to intersecting tiles so it can be rasterized in the map-side combine val keyed: RDD[(SpatialKey, (Feature[Geometry, CellValue], SpatialKey))] = features.flatMap { feature => - layout.mapTransform.keysForGeometry(feature.geom).toIterator + layout.mapTransform.keysForGeometry(feature.geom).iterator .map(key => (key, (feature, key)) ) } diff --git a/spark/src/main/scala/geotrellis/spark/store/cog/COGLayerReader.scala b/spark/src/main/scala/geotrellis/spark/store/cog/COGLayerReader.scala index 6725e5a4df..71cfaf508f 100644 --- a/spark/src/main/scala/geotrellis/spark/store/cog/COGLayerReader.scala +++ b/spark/src/main/scala/geotrellis/spark/store/cog/COGLayerReader.scala @@ -387,7 +387,7 @@ abstract class COGLayerReader[ID] extends Serializable { val keyDecoder = kwDecoder.value partition flatMap { seq => - IOUtils.parJoin[K, R](seq.toIterator) { index: BigInt => + IOUtils.parJoin[K, R](seq.iterator) { index: BigInt => if (!pathExists(keyPath(index))) Vector() else { val uri = fullPath(keyPath(index)) diff --git a/spark/src/main/scala/geotrellis/spark/store/file/FileRDDReader.scala b/spark/src/main/scala/geotrellis/spark/store/file/FileRDDReader.scala index 9533461434..4086964beb 100644 --- a/spark/src/main/scala/geotrellis/spark/store/file/FileRDDReader.scala +++ b/spark/src/main/scala/geotrellis/spark/store/file/FileRDDReader.scala @@ -60,7 +60,7 @@ object FileRDDReader { implicit 
val ec: ExecutionContext = executionContext partition flatMap { seq => - IOUtils.parJoin[K, V](seq.toIterator) { index: BigInt => + IOUtils.parJoin[K, V](seq.iterator) { index: BigInt => val path = keyPath(index) if (new File(path).exists) { val bytes: Array[Byte] = Filesystem.slurp(path) diff --git a/spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopRDDWriter.scala b/spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopRDDWriter.scala index 655687fe05..f36bd08056 100644 --- a/spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopRDDWriter.scala +++ b/spark/src/main/scala/geotrellis/spark/store/hadoop/HadoopRDDWriter.scala @@ -192,7 +192,7 @@ object HadoopRDDWriter { // Write merged records val writer = new MultiMapWriter(layerPathStr, 33, blockSize, indexInterval) - for ( (index, pairs) <- GroupConsecutiveIterator(kvs.toIterator)(r => keyIndex.toIndex(r._1))) { + for ( (index, pairs) <- GroupConsecutiveIterator(kvs.iterator)(r => keyIndex.toIndex(r._1))) { writer.write( new BigIntWritable(index.toByteArray), new BytesWritable(AvroEncoder.toBinary(pairs.toVector)(codec))) diff --git a/spark/src/test/scala/geotrellis/spark/RasterRegionSpec.scala b/spark/src/test/scala/geotrellis/spark/RasterRegionSpec.scala index fc64756be0..a7b56f6cd4 100644 --- a/spark/src/test/scala/geotrellis/spark/RasterRegionSpec.scala +++ b/spark/src/test/scala/geotrellis/spark/RasterRegionSpec.scala @@ -69,7 +69,7 @@ class RasterRegionSpec extends AnyFunSpec with TestEnvironment with RasterMatche val refRdd = srcRdd.flatMap { src => // too easy? whats missing val tileSource = LayoutTileSource.spatial(src, layout) - tileSource.keys.toIterator.map { key => (key, tileSource.rasterRegionForKey(key).get) } + tileSource.keys.iterator.map { key => (key, tileSource.rasterRegionForKey(key).get) } } // TADA! Jobs done. 
diff --git a/store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReader.scala b/store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReader.scala index c6af88291a..5a1dcf09e3 100644 --- a/store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReader.scala +++ b/store/src/main/scala/geotrellis/store/cog/COGCollectionLayerReader.scala @@ -216,7 +216,7 @@ object COGCollectionLayerReader { else baseQueryKeyBounds.flatMap(decomposeBounds) - IOUtils.parJoin[K, V](ranges.toIterator) { index: BigInt => + IOUtils.parJoin[K, V](ranges.iterator) { index: BigInt => if (!pathExists(keyPath(index))) Vector() else { val uri = fullPath(keyPath(index)) diff --git a/store/src/main/scala/geotrellis/store/file/FileCollectionReader.scala b/store/src/main/scala/geotrellis/store/file/FileCollectionReader.scala index 267aa99c92..bf3ce1d2f6 100644 --- a/store/src/main/scala/geotrellis/store/file/FileCollectionReader.scala +++ b/store/src/main/scala/geotrellis/store/file/FileCollectionReader.scala @@ -47,7 +47,7 @@ object FileCollectionReader { val includeKey = (key: K) => KeyBounds.includeKey(queryKeyBounds, key)(boundable) val _recordCodec = KeyValueRecordCodec[K, V] - IOUtils.parJoin[K, V](ranges.toIterator) { index: BigInt => + IOUtils.parJoin[K, V](ranges.iterator) { index: BigInt => val path = keyPath(index) if (new File(path).exists) { val bytes: Array[Byte] = Filesystem.slurp(path) diff --git a/store/src/main/scala/geotrellis/store/hadoop/HadoopCollectionReader.scala b/store/src/main/scala/geotrellis/store/hadoop/HadoopCollectionReader.scala index ae9fa63175..9ef7b0edae 100644 --- a/store/src/main/scala/geotrellis/store/hadoop/HadoopCollectionReader.scala +++ b/store/src/main/scala/geotrellis/store/hadoop/HadoopCollectionReader.scala @@ -61,7 +61,7 @@ class HadoopCollectionReader( if (queryKeyBounds.isEmpty) return Seq.empty[(K, V)] val includeKey = (key: K) => KeyBounds.includeKey(queryKeyBounds, key) - val indexRanges = 
queryKeyBounds.flatMap(decomposeBounds).toIterator + val indexRanges = queryKeyBounds.flatMap(decomposeBounds).iterator val codec = KeyValueRecordCodec[K, V] diff --git a/store/src/main/scala/geotrellis/store/util/BlockingThreadPool.scala b/store/src/main/scala/geotrellis/store/util/BlockingThreadPool.scala index 63baf1105c..db5cac89e1 100644 --- a/store/src/main/scala/geotrellis/store/util/BlockingThreadPool.scala +++ b/store/src/main/scala/geotrellis/store/util/BlockingThreadPool.scala @@ -26,7 +26,7 @@ import scala.util.{Failure, Success, Try} object BlockingThreadPool extends Serializable { case class Config(threads: Int = Runtime.getRuntime.availableProcessors) - implicit val configReader = ConfigReader.fromCursor[Config] { cur => + implicit val configReader: ConfigReader[Config] = ConfigReader.fromCursor[Config] { cur => cur.fluent.at("threads").asString match { case Right("default") => Right(Config()) case Right(th) => Try(th.toInt) match { diff --git a/store/src/main/scala/geotrellis/store/util/IOUtils.scala b/store/src/main/scala/geotrellis/store/util/IOUtils.scala index 84bcb472cb..0b083da36a 100644 --- a/store/src/main/scala/geotrellis/store/util/IOUtils.scala +++ b/store/src/main/scala/geotrellis/store/util/IOUtils.scala @@ -60,7 +60,7 @@ object IOUtils { implicit val cs = IO.contextShift(ec) val indices: Iterator[BigInt] = ranges.flatMap { case (start, end) => - (start to end).toIterator + (start to end).iterator } val index: fs2.Stream[IO, BigInt] = fs2.Stream.fromIterator[IO](indices)