Fix dependency conflicts #3798

Merged · 9 commits · Aug 4, 2018
1 change: 1 addition & 0 deletions .gitignore
@@ -65,6 +65,7 @@ TAGS
 
 # sbt specific
 .cache
+.coursier-cache
 .history
 .lib/
 dist/*
6 changes: 2 additions & 4 deletions app-backend/api/src/main/scala/toolrun/Routes.scala
@@ -7,7 +7,6 @@ import com.azavea.rf.datamodel._
 import com.azavea.rf.tool.ast.MapAlgebraAST
 import com.azavea.rf.tool.eval.PureInterpreter
 import com.azavea.rf.database.filter.Filterables._
-import com.azavea.maml.serve.InterpreterExceptionHandling
 import com.lonelyplanet.akka.http.extensions.PaginationDirectives
 import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport._
 import akka.http.scaladsl.model.StatusCodes
@@ -33,8 +32,7 @@ trait ToolRunRoutes extends Authentication
     with PaginationDirectives
     with ToolRunQueryParametersDirective
     with CommonHandlers
-    with UserErrorHandler
-    with InterpreterExceptionHandling {
+    with UserErrorHandler {
 
   val xa: Transactor[IO]
 
@@ -95,7 +93,7 @@ trait ToolRunRoutes extends Authentication
   def createToolRun: Route = authenticate { user =>
     entity(as[ToolRun.Create]) { newRun =>
       onSuccess(ToolRunDao.insertToolRun(newRun, user).transact(xa).unsafeToFuture) { toolRun =>
-        handleExceptions(interpreterExceptionHandler) {
+        {
           complete {
             (StatusCodes.Created, toolRun)
           }
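
Removing `InterpreterExceptionHandling` drops the maml-specific wrapper around tool-run creation; exceptions presumably now fall through to the remaining mixins such as `UserErrorHandler`. For context, a minimal sketch of the akka-http pattern being deleted, with a hypothetical handler standing in for maml's `interpreterExceptionHandler`:

```scala
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.{Directives, ExceptionHandler, Route}

object HandlerSketch extends Directives {
  // Hypothetical stand-in for the removed interpreterExceptionHandler: an
  // akka-http ExceptionHandler maps exceptions thrown in a route to responses.
  val sketchHandler: ExceptionHandler = ExceptionHandler {
    case e: IllegalArgumentException =>
      complete(StatusCodes.BadRequest -> e.getMessage)
  }

  // The wrapping pattern this PR removes.
  def wrapped(inner: Route): Route =
    handleExceptions(sketchHandler)(inner)
}
```
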
4 changes: 1 addition & 3 deletions app-backend/api/src/main/scala/tools/Routes.scala
@@ -7,7 +7,6 @@ import com.azavea.rf.datamodel._
 import com.azavea.rf.tool.ast._
 import com.azavea.rf.tool.ast.codec._
 import com.azavea.rf.database.filter.Filterables._
-import com.azavea.maml.serve._
 import io.circe._
 import akka.http.scaladsl.model.StatusCodes
 import akka.http.scaladsl.server.Route
@@ -32,7 +31,6 @@ trait ToolRoutes extends Authentication
     with PaginationDirectives
     with CommonHandlers
     with KamonTraceDirectives
-    with InterpreterExceptionHandling
     with UserErrorHandler {
 
   val xa: Transactor[IO]
@@ -193,7 +191,7 @@ trait ToolRoutes extends Authentication
 
   def validateAST: Route = authenticate { user =>
     entity(as[Json]) { jsonAst =>
-      handleExceptions(interpreterExceptionHandler) {
+      {
         complete {
           jsonAst.as[MapAlgebraAST] match {
             case Right(ast) =>
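
The `validateAST` route keeps its circe decode-and-branch shape; only the exception wrapper is gone. A self-contained sketch of that shape, with a hypothetical payload type standing in for `MapAlgebraAST`:

```scala
import io.circe.{Decoder, Json}
import io.circe.generic.semiauto.deriveDecoder
import io.circe.parser.parse

object DecodeSketch {
  final case class Node(op: String) // hypothetical stand-in for MapAlgebraAST
  implicit val nodeDecoder: Decoder[Node] = deriveDecoder[Node]

  def main(args: Array[String]): Unit = {
    val jsonAst: Json = parse("""{"op": "ndvi"}""").getOrElse(Json.Null)
    jsonAst.as[Node] match {
      case Right(ast) => println(s"valid: $ast")                // 200-style branch
      case Left(err)  => println(s"invalid: ${err.getMessage}") // 400-style branch
    }
  }
}
```
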
@@ -11,7 +11,6 @@ import com.azavea.rf.batch.util._
 import com.azavea.rf.batch.util.conf._
 import com.azavea.rf.datamodel._
 import com.azavea.rf.tool.ast.MapAlgebraAST
-import com.azavea.maml.serve._
 import com.azavea.maml.eval._
 import io.circe.parser._
 import io.circe.syntax._
@@ -165,7 +165,7 @@ object Ingest extends SparkJob with RollbarNotifier with Config {
         bucket = s3uri.getBucket,
         key = s3uri.getKey,
         client = S3Client.DEFAULT),
-      decompress = false, streaming = true, withOverviews = true, None)
+      streaming = true, withOverviews = true, None)
   }
 
   val info = readInfo
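
The dropped `decompress = false` argument tracks the GeoTrellis upgrade this PR pulls in: the newer `GeoTiffReader` no longer takes that flag, since streaming reads decode segments on demand anyway (`withOverviews = true` additionally lets COG overviews back the read). A minimal sketch of the simplified call, assuming GeoTrellis 2.x and a hypothetical local file:

```scala
import geotrellis.raster.io.geotiff.MultibandGeoTiff
import geotrellis.raster.io.geotiff.reader.GeoTiffReader

object StreamingReadSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical path. With streaming = true, segments are read lazily,
    // which is why a separate `decompress` switch became redundant.
    val tiff: MultibandGeoTiff =
      GeoTiffReader.readMultiband("/tmp/example.tif", streaming = true)

    // Header-level information is available without pulling every segment.
    println(s"${tiff.tile.cols} x ${tiff.tile.rows}, crs = ${tiff.crs}")
  }
}
```
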
@@ -39,8 +39,8 @@ case class S3ToPostgres(uri: AmazonS3URI, attributeTable: String = "layer_attrib
       val LayerAttributes(header, metadata, keyIndex, schema) = from.readLayerAttributesSafe[S3LayerHeader, TileLayerMetadata[SpatialKey], SpatialKey](layerId)
       to.write(layerId, AttributeStore.Fields.header, header)
       to.write(layerId, AttributeStore.Fields.metadata, metadata)
-      to.write(layerId, AttributeStore.Fields.keyIndex, keyIndex)
-      to.write(layerId, AttributeStore.Fields.schema, schema)
+      to.write(layerId, AttributeStore.AvroLayerFields.keyIndex, keyIndex)
+      to.write(layerId, AttributeStore.AvroLayerFields.schema, schema)
       to.write(layerId, "layerComplete", from.cacheReadSafe[Boolean](layerId, "layerComplete"))
     } else {
       to.write(layerId, "histogram", from.cacheReadSafe[Array[Histogram[Double]]](layerId, "histogram"))
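
This rename (repeated in the batch package object just below) follows a GeoTrellis API split: Avro-specific attribute keys moved out of `AttributeStore.Fields` into `AttributeStore.AvroLayerFields`, while the format-neutral `header` and `metadata` stayed put. A sketch of the distinction, assuming GeoTrellis 2.x's `geotrellis.spark.io` layout:

```scala
import geotrellis.spark.io.AttributeStore

object FieldsSketch extends App {
  // Format-neutral attribute keys keep their old home...
  println(AttributeStore.Fields.header)
  println(AttributeStore.Fields.metadata)
  // ...while Avro-layer keys now live in a dedicated object.
  println(AttributeStore.AvroLayerFields.keyIndex)
  println(AttributeStore.AvroLayerFields.schema)
}
```
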
@@ -54,8 +54,8 @@ package object batch {
       LayerAttributes(
         blob.fields(AttributeStore.Fields.header).convertTo[H],
         blob.fields(AttributeStore.Fields.metadata).convertTo[M],
-        blob.fields(AttributeStore.Fields.keyIndex).convertTo[KeyIndex[K]],
-        blob.fields(AttributeStore.Fields.schema).convertTo[Schema]
+        blob.fields(AttributeStore.AvroLayerFields.keyIndex).convertTo[KeyIndex[K]],
+        blob.fields(AttributeStore.AvroLayerFields.schema).convertTo[Schema]
       )
     }
   }
3 changes: 2 additions & 1 deletion app-backend/build.sbt
@@ -335,7 +335,7 @@ lazy val tool = Project("tool", file("tool"))
   .settings(resolvers += Resolver.bintrayRepo("azavea", "maven"))
   .settings({
     libraryDependencies ++= loggingDependencies ++ Seq(
-      Dependencies.spark,
+      Dependencies.sparkCore,
       Dependencies.geotrellisSpark,
       Dependencies.geotrellisRaster,
       Dependencies.geotrellisRasterTestkit,
@@ -346,6 +346,7 @@ lazy val tool = Project("tool", file("tool"))
       Dependencies.circeParser,
       Dependencies.circeOptics,
       Dependencies.scalaCheck,
+      Dependencies.scalaz,
       Dependencies.mamlJvm
     )
   })
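
Splitting the monolithic `Dependencies.spark` into `Dependencies.sparkCore` and pinning `scalaz` explicitly are typical ways to resolve sbt eviction conflicts, which matches this PR's title. The repo's `Dependencies` object isn't shown in the diff, but the touched entries plausibly look something like this (coordinates and versions are illustrative, not taken from this repository):

```scala
// project/Dependencies.scala — hypothetical sketch
import sbt._

object Dependencies {
  val sparkCore = "org.apache.spark" %% "spark-core"  % "2.3.1" % Provided
  val scalaz    = "org.scalaz"       %% "scalaz-core" % "7.2.25"
}
```
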
4 changes: 2 additions & 2 deletions app-backend/common/src/main/scala/utils/CogUtils.scala
@@ -45,7 +45,7 @@ object CogUtils {
     }.mapFilter { headerBytes =>
       RangeReaderUtils.fromUri(uri).map { rr =>
         val crr = CacheRangeReader(rr, headerBytes)
-        GeoTiffReader.readMultiband(crr, decompress = false, streaming = true)
+        GeoTiffReader.readMultiband(crr, streaming = true)
       }
     }
   }
@@ -149,7 +149,7 @@
   def getTiffExtent(uri: String): Option[Projected[MultiPolygon]] = {
     for {
       rr <- RangeReaderUtils.fromUri(uri)
-      tiff = GeoTiffReader.readMultiband(rr, decompress = false, streaming = true)
+      tiff = GeoTiffReader.readMultiband(rr, streaming = true)
     } yield {
       val crs = tiff.crs
       Projected(MultiPolygon(tiff.extent.reproject(crs, WebMercator).toPolygon()), 3857)
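
For context on the second hunk: `getTiffExtent` reprojects the tiff's extent into WebMercator and tags the resulting footprint with SRID 3857. A standalone sketch of that construction, assuming `geotrellis.slick.Projected` and a hypothetical source extent and CRS:

```scala
import geotrellis.proj4.{LatLng, WebMercator}
import geotrellis.slick.Projected
import geotrellis.vector._

object FootprintSketch extends App {
  val extent = Extent(-75.2, 39.9, -75.1, 40.0) // hypothetical tiff extent
  val crs = LatLng                              // hypothetical source CRS

  // Same shape as getTiffExtent's yield: reproject, polygonize, tag with SRID.
  val footprint: Projected[MultiPolygon] =
    Projected(MultiPolygon(extent.reproject(crs, WebMercator).toPolygon()), 3857)

  println(footprint)
}
```
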
@@ -12,7 +12,7 @@ import io.circe.generic.semiauto._
 
 import com.azavea.rf.bridge._
 
-import geotrellis.vector._
+import geotrellis.vector.{Projected => _, _}
 import geotrellis.vector.io._
 import geotrellis.proj4._
 import geotrellis.slick._
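
`{Projected => _, _}` is Scala's import-hiding syntax: bring in everything from `geotrellis.vector` except `Projected`. Presumably the upgraded GeoTrellis adds a `Projected` to `geotrellis.vector` that would collide with the one this file already gets from `geotrellis.slick._`. A tiny illustration of the mechanism with hypothetical objects:

```scala
object ImportHidingSketch {
  object a { case class Projected(srid: Int); case class Extent(width: Double) }
  object b { case class Projected(srid: Int) }

  import a.{Projected => _, _} // everything from a except its Projected
  import b.Projected           // so b's Projected is unambiguous here

  val p: Projected = Projected(3857)
  val e: Extent = Extent(1.0)
}
```
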
@@ -159,7 +159,7 @@ object ProjectDao extends Dao[Project] {
   }
 
   def addScenesToProject(sceneIds: NonEmptyList[UUID], projectId: UUID, user: User, isAccepted: Boolean): ConnectionIO[Int] = {
-    val inClause = fr"scenes.id IN (" ++ Fragment.const(sceneIds.map(_.show).foldSmash("'", "','", "'")) ++ fr")"
+    val inClause = Fragments.in(fr"scenes.id", sceneIds)
     val sceneIdWithDatasourceF = fr"""
       SELECT scenes.id,
              datasources.id,
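
`Fragments.in` is doobie's helper for IN clauses: given a column fragment and a `NonEmptyList`, it renders `scenes.id IN (?, ?, …)` with proper JDBC parameter binding, replacing the hand-rolled `foldSmash` of quoted literals that bypassed parameters entirely. A minimal sketch, assuming doobie with the Postgres UUID mapping:

```scala
import java.util.UUID

import cats.data.NonEmptyList
import doobie._
import doobie.implicits._
import doobie.postgres.implicits._ // Meta[UUID]

object InClauseSketch {
  val sceneIds: NonEmptyList[UUID] =
    NonEmptyList.of(UUID.randomUUID(), UUID.randomUUID()) // hypothetical ids

  // Renders: scenes.id IN (?, ?) — each UUID bound as a parameter.
  val inClause: Fragment = Fragments.in(fr"scenes.id", sceneIds)
  val query: Fragment = fr"SELECT scenes.id FROM scenes WHERE" ++ inClause
}
```
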
@@ -50,12 +50,10 @@ object SceneToProjectDao extends Dao[SceneToProject] with LazyLogging {
     val updateF: Fragment =fr"""
       UPDATE scenes_to_projects
       SET accepted = true
-      WHERE
-        project_id = ${projectId}
-      AND
-        scene_id IN (""" ++
-      Fragment.const(sceneIds.map(_.show).foldSmash("'", "','", "'")) ++
-      fr")"
+    """ ++ Fragments.whereAnd(
+      fr"project_id = $projectId",
+      Fragments.in(fr"scene_id", sceneIds)
+    )
     updateF.update.run
   }
 
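`Fragments.whereAnd` renders `WHERE <f1> AND <f2> …`, so the UPDATE's conditions compose without hand-written `WHERE`/`AND` tokens or string-spliced id lists. A sketch of the rewritten `updateF`, under the same hypothetical doobie setup as above:

```scala
import java.util.UUID

import cats.data.NonEmptyList
import doobie._
import doobie.implicits._
import doobie.postgres.implicits._ // Meta[UUID]

object WhereAndSketch {
  val projectId: UUID = UUID.randomUUID()                 // hypothetical
  val sceneIds: NonEmptyList[UUID] =
    NonEmptyList.of(UUID.randomUUID(), UUID.randomUUID()) // hypothetical

  // Renders: UPDATE ... SET accepted = true WHERE project_id = ? AND scene_id IN (?, ?)
  val updateF: Fragment =
    fr"UPDATE scenes_to_projects SET accepted = true" ++
      Fragments.whereAnd(
        fr"project_id = $projectId",
        Fragments.in(fr"scene_id", sceneIds)
      )
}
```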