Skip to content

Commit

Permalink
Add RasterSources API (#3053)
Browse files Browse the repository at this point in the history
* Add RasterSources

* Enable GDAL tests

* Bump all dependencies versions up
  • Loading branch information
pomadchin committed Sep 4, 2019
1 parent ba9ed41 commit 7cf9bda
Show file tree
Hide file tree
Showing 157 changed files with 10,030 additions and 110 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Expand Up @@ -53,3 +53,5 @@ derby.log
metastore_db/
*.log

spark/src/test/resources/vlm/*catalog*

3 changes: 2 additions & 1 deletion .locationtech/deploy-211.sh
Expand Up @@ -26,4 +26,5 @@
&& ./sbt "project vectortile" publish -no-colors \
&& ./sbt "project raster-testkit" publish -no-colors \
&& ./sbt "project vector-testkit" publish -no-colors \
&& ./sbt "project spark-testkit" publish -no-colors
&& ./sbt "project spark-testkit" publish -no-colors \
&& ./sbt "project gdal" publish -no-colors
3 changes: 2 additions & 1 deletion .locationtech/deploy-212.sh
Expand Up @@ -26,4 +26,5 @@
&& ./sbt -212 "project vectortile" publish -no-colors \
&& ./sbt -212 "project raster-testkit" publish -no-colors \
&& ./sbt -212 "project vector-testkit" publish -no-colors \
&& ./sbt -212 "project spark-testkit" publish -no-colors
&& ./sbt -212 "project spark-testkit" publish -no-colors \
&& ./sbt -212 "project gdal" publish -no-colors
2 changes: 1 addition & 1 deletion .travis.yml
Expand Up @@ -49,7 +49,7 @@ cache:
- $HOME/downloads

script:
- .travis/build-and-test.sh
- .travis/build-and-test-docker.sh

notifications:
email:
Expand Down
10 changes: 10 additions & 0 deletions .travis/build-and-test-docker.sh
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

# Runs the regular .travis/build-and-test.sh inside a GDAL-enabled OpenJDK 8
# container (quay.io/azavea/openjdk-gdal:2.4-jdk8-slim) so tests that need the
# native GDAL 2.4 bindings can run on Travis.
# Mounts the host ivy2/sbt caches and the checkout into the container, and
# forwards the Travis environment variables the inner script reads.
# NOTE(review): `-it` allocates a TTY — assumes the Travis shell provides one;
# confirm, since non-interactive CI runners sometimes reject `-t`.
docker run -it --net=host \
-v $HOME/.ivy2:/root/.ivy2 \
-v $HOME/.sbt:/root/.sbt \
-v $TRAVIS_BUILD_DIR:/geotrellis \
-e RUN_SET=$RUN_SET \
-e TRAVIS_SCALA_VERSION=$TRAVIS_SCALA_VERSION \
-e TRAVIS_COMMIT=$TRAVIS_COMMIT \
-e TRAVIS_JDK_VERSION=$TRAVIS_JDK_VERSION quay.io/azavea/openjdk-gdal:2.4-jdk8-slim /bin/bash -c "cd /geotrellis; .travis/build-and-test.sh"
1 change: 1 addition & 0 deletions .travis/build-and-test-set-2.sh
Expand Up @@ -2,6 +2,7 @@

./sbt "++$TRAVIS_SCALA_VERSION" \
"project raster" test \
"project gdal" test \
"project accumulo" test \
"project accumulo-spark" test \
"project s3" test \
Expand Down
1 change: 1 addition & 0 deletions .travis/build-and-test-set-3.sh
Expand Up @@ -2,4 +2,5 @@

./sbt "++$TRAVIS_SCALA_VERSION" \
"project spark" test \
"project gdal-spark" test \
"project spark-pipeline" test || { exit 1; }
1 change: 1 addition & 0 deletions .travis/build-set-2.sh
Expand Up @@ -2,6 +2,7 @@

./sbt "++$TRAVIS_SCALA_VERSION" \
"project raster" test \
"project gdal" test \
"project accumulo" test \
"project accumulo-spark" test \
"project s3" test \
Expand Down
1 change: 1 addition & 0 deletions .travis/build-set-3.sh
Expand Up @@ -2,4 +2,5 @@

./sbt "++$TRAVIS_SCALA_VERSION" \
"project spark" test \
"project gdal-spark" test \
"project spark-pipeline" test || { exit 1; }
8 changes: 4 additions & 4 deletions .travis/hbase-install.sh
@@ -1,6 +1,6 @@
#! /bin/bash

if [ ! -f $HOME/downloads/hbase-2.1.5-bin.tar.gz ]; then sudo wget -O $HOME/downloads/hbase-2.1.5-bin.tar.gz http://www-us.apache.org/dist/hbase/2.1.5/hbase-2.1.5-bin.tar.gz; fi
sudo mv $HOME/downloads/hbase-2.1.5-bin.tar.gz hbase-2.1.5-bin.tar.gz && tar xzf hbase-2.1.5-bin.tar.gz
sudo rm -f hbase-2.1.5/conf/hbase-site.xml && sudo mv .travis/hbase/hbase-site.xml hbase-2.1.5/conf
sudo hbase-2.1.5/bin/start-hbase.sh
if [ ! -f $HOME/downloads/hbase-2.1.6-bin.tar.gz ]; then sudo wget -O $HOME/downloads/hbase-2.1.6-bin.tar.gz http://www-us.apache.org/dist/hbase/2.1.6/hbase-2.1.6-bin.tar.gz; fi
sudo mv $HOME/downloads/hbase-2.1.6-bin.tar.gz hbase-2.1.6-bin.tar.gz && tar xzf hbase-2.1.6-bin.tar.gz
sudo rm -f hbase-2.1.6/conf/hbase-site.xml && sudo mv .travis/hbase/hbase-site.xml hbase-2.1.6/conf
sudo hbase-2.1.6/bin/start-hbase.sh
72 changes: 38 additions & 34 deletions build.sbt
Expand Up @@ -58,7 +58,7 @@ lazy val commonSettings = Seq(
.filter(_.asFile.canRead)
.map(Credentials(_)),

addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.10" cross CrossVersion.binary),
addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.10.3" cross CrossVersion.binary),
addCompilerPlugin("org.scalamacros" %% "paradise" % "2.1.1" cross CrossVersion.full),

pomExtra := (
Expand Down Expand Up @@ -105,23 +105,23 @@ lazy val commonSettings = Seq(
updateOptions := updateOptions.value.withGigahorse(false)
)

lazy val root = Project("geotrellis", file(".")).
aggregate(
`accumulo`,
lazy val root = Project("geotrellis", file("."))
.aggregate(
accumulo,
`accumulo-spark`,
`cassandra`,
cassandra,
`cassandra-spark`,
`doc-examples`,
geomesa,
geotools,
geowave,
`hbase`,
hbase,
`hbase-spark`,
macros,
proj4,
raster,
`raster-testkit`,
`s3`,
s3,
`s3-spark`,
shapefile,
spark,
Expand All @@ -130,20 +130,13 @@ lazy val root = Project("geotrellis", file(".")).
util,
vector,
`vector-testkit`,
vectortile
).
settings(commonSettings: _*).
enablePlugins(ScalaUnidocPlugin).
settings(
initialCommands in console :=
"""
import geotrellis.raster._
import geotrellis.vector._
import geotrellis.proj4._
import geotrellis.spark._
"""
).
settings(unidocProjectFilter in (ScalaUnidoc, unidoc) := inAnyProject -- inProjects(geowave))
vectortile,
gdal,
`gdal-spark`
)
.settings(commonSettings: _*)
.enablePlugins(ScalaUnidocPlugin)
.settings(unidocProjectFilter in (ScalaUnidoc, unidoc) := inAnyProject -- inProjects(geowave))

lazy val macros = project
.settings(commonSettings)
Expand Down Expand Up @@ -204,24 +197,24 @@ lazy val `spark-testkit` = project
.settings(commonSettings)
.settings(Settings.`spark-testkit`)

lazy val `s3` = project
lazy val s3 = project
.dependsOn(store)
.settings(commonSettings)
.settings(Settings.`s3`)
.settings(Settings.s3)

lazy val `s3-spark` = project
.dependsOn(
spark % "compile->compile;test->test", // <-- spark-testkit update should simplify this
`s3`,
s3,
`spark-testkit` % Test
)
.settings(commonSettings)
.settings(Settings.`s3-spark`)

lazy val `accumulo` = project
lazy val accumulo = project
.dependsOn(store)
.settings(commonSettings)
.settings(Settings.`accumulo`)
.settings(Settings.accumulo)

lazy val `accumulo-spark` = project
.dependsOn(
Expand All @@ -232,29 +225,29 @@ lazy val `accumulo-spark` = project
.settings(commonSettings)
.settings(Settings.`accumulo-spark`)

lazy val `cassandra` = project
lazy val cassandra = project
.dependsOn(store)
.settings(commonSettings)
.settings(Settings.`cassandra`)
.settings(Settings.cassandra)

lazy val `cassandra-spark` = project
.dependsOn(
`cassandra`,
cassandra,
spark % "compile->compile;test->test", // <-- spark-testkit update should simplify this
`spark-testkit` % Test
)
.settings(commonSettings)
.settings(Settings.`cassandra-spark`)

lazy val `hbase` = project
lazy val hbase = project
.dependsOn(store)
.settings(commonSettings) // HBase depends on its own protobuf version
.settings(Settings.`hbase`)
.settings(Settings.hbase)
.settings(projectDependencies := { Seq((projectID in layer).value.exclude("com.google.protobuf", "protobuf-java")) })

lazy val `hbase-spark` = project
.dependsOn(
`hbase`,
hbase,
spark % "compile->compile;test->test", // <-- spark-testkit update should simplify this
`spark-testkit` % Test
)
Expand All @@ -269,7 +262,7 @@ lazy val `spark-pipeline` = Project(id = "spark-pipeline", base = file("spark-pi

lazy val geotools = project
.dependsOn(raster, vector, proj4, `vector-testkit` % Test, `raster-testkit` % Test,
`raster` % "test->test" // <-- to get rid of this, move `GeoTiffTestUtils` to the testkit.
raster % "test->test" // <-- to get rid of this, move `GeoTiffTestUtils` to the testkit.
)
.settings(commonSettings)
.settings(Settings.geotools)
Expand Down Expand Up @@ -310,11 +303,22 @@ lazy val bench = project
.settings(Settings.bench)

lazy val layer = project
.dependsOn(raster)
.dependsOn(raster, `raster-testkit` % Test)
.settings(commonSettings)
.settings(Settings.layer)

lazy val store = project
.dependsOn(layer)
.settings(commonSettings)
.settings(Settings.store)

lazy val gdal = project
.dependsOn(raster, `raster-testkit` % Test)
.settings(commonSettings)
.settings(Settings.gdal)

lazy val `gdal-spark` = project
.dependsOn(gdal, spark, `spark-testkit` % Test)
.settings(commonSettings)
.settings(publish / skip := true) // at this point we need this project only for tests
.settings(Settings.`gdal-spark`)
4 changes: 3 additions & 1 deletion docs/CHANGELOG.rst
Expand Up @@ -6,6 +6,8 @@ Changelog

API Changes & Project structure changes

- **New:** Add RasterSources API (`#3053 <https://github.com/locationtech/geotrellis/pull/3053>`_).

- ``geotrellis-vector``

- **Change:** We are now favoring direct use of JTS geometries for improved interoperability with other projects. Scala wrapper classes for Geometry have been snuffed. Many submodules of ``geotrellis.vector`` have also been sacked in favor of direct usage of the corresponding JTS functionality. Extension methods and companion objects have been employed to maintain a crisp, candy shell around JTS to keep most interactions from messing up your fingers. Import ``geotrellis.vector._`` to access these niceties; if it is required, ``import org.locationtech.jts.{geom => jts}`` to prevent namespace collisions. In the REPL, geometries will need to be constructed via the duplicate definitions in the ``JTS`` object to avoid namespace clashes that appear to be buggy behavior on the part of the REPL (that is, use ``JTS.Point(0,0)`` to construct a point at the origin in interactive sessions, but in compiled code, ``Point(0,0)`` will suffice).
Expand Down Expand Up @@ -106,7 +108,6 @@ API Changes & Project structure changes
Fixes & Updates
^^^^^^^^^^^^^^^

- Update pureconfig to version 0.10.2 (`#2882 <https://github.com/locationtech/geotrellis/pull/2882>`_).
- Update dependencies (`#2904 <https://github.com/locationtech/geotrellis/pull/2904>`_).
- Bump ScalaPB version up with some API enhancements (`#2898 <https://github.com/locationtech/geotrellis/pull/2898>`_).
- Artifacts in Viewshed have been addressed, the pixels/meter calculation has also been improved (`#2917 <https://github.com/locationtech/geotrellis/pull/2917>`_).
Expand All @@ -119,6 +120,7 @@ Fixes & Updates
- ``S3RangeReader`` will now optionally read the HEADER of an object (`#3025 <https://github.com/locationtech/geotrellis/pull/3025>`_).
- ``FileRangeReaderProvider`` can now handle more types of ``URI``\s (`#3034 <https://github.com/locationtech/geotrellis/pull/3034>`_).
- Bump proj4 version to fix multiple performance issues (`#3039 <https://github.com/locationtech/geotrellis/pull/3039>`_).
- Update dependencies (`#3053 <https://github.com/locationtech/geotrellis/pull/3053>`_).

2.3.0
-----
Expand Down
35 changes: 35 additions & 0 deletions gdal-spark/src/test/scala/geotrellis/GDALTestUtils.scala
@@ -0,0 +1,35 @@
/*
* Copyright 2019 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis

import java.io.File

/**
  * Helpers for locating GeoTiff test fixtures shared between the gdal and
  * spark modules' test resource directories.
  */
object GDALTestUtils {
  // Fixture roots, relative to the gdal-spark module directory.
  // Hoisted to vals: the original recomputed these via a 0-arity `def` on
  // every call.
  private val GdalResourcePath  = "../gdal/src/test/resources"
  private val SparkResourcePath = "../spark/src/test/resources"

  /**
    * Resolves `name` against `base` and verifies the file exists.
    *
    * @throws IllegalArgumentException if the resolved path does not exist
    *         (fixtures ship as an archive that must be unzipped first)
    */
  private def resolve(base: String, name: String): String = {
    val path = s"$base/$name"
    require(new File(path).exists, s"$path does not exist, unzip the archive?")
    path
  }

  /** Absolute-ish path to a fixture under the gdal module's test resources. */
  def gdalGeoTiffPath(name: String): String = resolve(GdalResourcePath, name)

  /** Absolute-ish path to a fixture under the spark module's test resources. */
  def sparkGeoTiffPath(name: String): String = resolve(SparkResourcePath, name)
}

0 comments on commit 7cf9bda

Please sign in to comment.