diff --git a/build.sbt b/build.sbt
index 1a81c338a..8f525ae6f 100755
--- a/build.sbt
+++ b/build.sbt
@@ -69,6 +69,7 @@ lazy val loader = project.in(file("modules/loader"))
       Dependencies.ssm,
       Dependencies.dynamodb,
       Dependencies.jSch,
+      Dependencies.sentry,
       Dependencies.specs2,
       Dependencies.specs2ScalaCheck,
diff --git a/modules/common/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/common/StorageTarget.scala b/modules/common/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/common/StorageTarget.scala
index d5455ab2d..3a94f0f1e 100644
--- a/modules/common/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/common/StorageTarget.scala
+++ b/modules/common/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/common/StorageTarget.scala
@@ -12,6 +12,7 @@
  */
 package com.snowplowanalytics.snowplow.rdbloader.common
 
+import java.net.URI
 import java.util.UUID
 import java.util.Properties
 
@@ -50,6 +51,7 @@ sealed trait StorageTarget extends Product with Serializable {
 
   def blacklistTabular: Option[List[SchemaCriterion]]   // None means tabular is disabled
   def messageQueue: Option[String]
+  def sentryDsn: Option[URI]
 }
 
 object StorageTarget {
@@ -89,7 +91,8 @@ object StorageTarget {
                              compRows: Int,
                              sshTunnel: Option[TunnelConfig],
                              blacklistTabular: Option[List[SchemaCriterion]],
-                             messageQueue: Option[String])
+                             messageQueue: Option[String],
+                             sentryDsn: Option[URI])
     extends StorageTarget
 
   /**
@@ -164,14 +167,28 @@ object StorageTarget {
   case class DestinationConfig(host: String, port: Int)
 
   /** ADT representing fact that password can be either plain-text or encrypted in EC2 Parameter Store */
-  sealed trait PasswordConfig {
+  sealed trait PasswordConfig extends Product with Serializable {
     def getUnencrypted: String = this match {
-      case PlainText(plain) => plain
-      case EncryptedKey(EncryptedConfig(key)) => key.parameterName
+      case PasswordConfig.PlainText(plain) => plain
+      case PasswordConfig.EncryptedKey(EncryptedConfig(key)) => key.parameterName
+    }
+  }
+  object PasswordConfig {
+    final case class PlainText(value: String) extends PasswordConfig
+    final case class EncryptedKey(value: EncryptedConfig) extends PasswordConfig
+
+    implicit object PasswordDecoder extends Decoder[PasswordConfig] {
+      def apply(hCursor: HCursor): Decoder.Result[PasswordConfig] = {
+        hCursor.value.asString match {
+          case Some(s) => Right(PasswordConfig.PlainText(s))
+          case None => hCursor.value.asObject match {
+            case Some(_) => hCursor.value.as[EncryptedConfig].map(PasswordConfig.EncryptedKey)
+            case None => Left(DecodingFailure("password should be either plain text or reference to encrypted key", hCursor.history))
+          }
+        }
+      }
     }
   }
-  case class PlainText(value: String) extends PasswordConfig
-  case class EncryptedKey(value: EncryptedConfig) extends PasswordConfig
 
   /**
    * SSH configuration, enabling target to be loaded though tunnel
@@ -183,18 +200,6 @@ object StorageTarget {
    */
   case class TunnelConfig(bastion: BastionConfig, localPort: Int, destination: DestinationConfig)
 
-  implicit object PasswordDecoder extends Decoder[PasswordConfig] {
-    def apply(hCursor: HCursor): Decoder.Result[PasswordConfig] = {
-      hCursor.value.asString match {
-        case Some(s) => Right(PlainText(s))
-        case None => hCursor.value.asObject match {
-          case Some(_) => hCursor.value.as[EncryptedConfig].map(EncryptedKey)
-          case None => Left(DecodingFailure("password should be either plain text or reference to encrypted key", hCursor.history))
-        }
-      }
-    }
-  }
-
   /**
    * Decode Json as one of known storage targets
    *
@@ -222,6 +227,9 @@ object StorageTarget {
       .toEitherNel
       .flatMap { json =>
         (decodeStorageTarget(json).toEitherNel, validate(client)(json).toEitherNel).parMapN { case (config, _) => config }
       }
 
+  implicit def uriDecoder: Decoder[URI] =
+    Decoder[String].emap(s => Either.catchOnly[IllegalArgumentException](URI.create(s)).leftMap(_.toString))
+
   implicit def redsfhitConfigDecoder: Decoder[RedshiftConfig] =
     deriveDecoder[RedshiftConfig]
 
@@ -240,9 +248,6 @@ object StorageTarget {
   implicit def parameterStoreConfigDecoder: Decoder[ParameterStoreConfig] =
     deriveDecoder[ParameterStoreConfig]
 
-  implicit def passwordConfigDecoder: Decoder[PasswordConfig] =
-    deriveDecoder[PasswordConfig]
-
   implicit def schemaCriterionConfigDecoder: Decoder[SchemaCriterion] =
     Decoder.decodeString.emap { s =>
       SchemaCriterion.parse(s).toRight(s"Cannot parse [$s] as Iglu SchemaCriterion, it must have iglu:vendor/name/format/1-*-* format")
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.rdbloader.common/StorageTargetSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.rdbloader.common/StorageTargetSpec.scala
index f7002c2f1..cf0de385a 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.rdbloader.common/StorageTargetSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.rdbloader.common/StorageTargetSpec.scala
@@ -11,6 +11,7 @@
  */
 package com.snowplowanalytics.snowplow.rdbloader.common
 
+import java.net.URI
 import java.util.UUID
 
 import cats.Id
@@ -18,82 +19,36 @@ import cats.data.NonEmptyList
 
 import io.circe.literal._
 
-import org.specs2.Specification
+import org.specs2.mutable.Specification
 
 import com.snowplowanalytics.iglu.core.SchemaCriterion
 import com.snowplowanalytics.iglu.client.Client
 
-class StorageTargetSpec extends Specification { def is = s2"""
-  Parse Redshift storage target configuration $e1
-  Parse Redshift storage target (3-0-0) with tunnel $e2
-  Parse Redshift storage target (3-0-0) with encrypted password $e3
-  Fail to parse old Redshift storage target (3-0-0) with encrypted password $e4
-  Parse Redshift storage target (3-0-0) with many JDBC options $e5
-  Fail to parse Redshift storage target (3-0-0) with wrong JDBC value $e6
-  Parse Redshift storage target (4-0-0) with tabular blacklist $e7
-  """
+class StorageTargetSpec extends Specification {
+  import StorageTargetSpec._
 
-  private val targetId = UUID.fromString("11112233-dddd-4845-a7e6-8fdc88d599d0")
-
-  private val IgluCentral = "http://iglucentral-dev.com.s3-website-us-east-1.amazonaws.com/feature/rdb-blacklist/"
-
-  private val resolverConfig =
-    json"""
-      {
-        "schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-0-2",
-        "data": {
-          "cacheSize": 500,
-          "repositories": [
-            {
-              "name": "Iglu Central",
-              "priority": 1,
-              "vendorPrefixes": [ "com.snowplowanalytics" ],
-              "connection": {
-                "http": {
-                  "uri": $IgluCentral
-                }
-              }
-            },
-            {
-              "name": "Embedded Test",
-              "priority": 0,
-              "vendorPrefixes": [ "com.snowplowanalytics" ],
-              "connection": {
-                "embedded": {
-                  "path": "/embed"
-                }
-              }
-            }
-          ]
-        }
-      }
-    """
-
-  private val resolver = Client.parseDefault[Id](resolverConfig).value.fold(throw _, identity)
-  private val parseWithDefaultResolver = StorageTarget.parseTarget(resolver, _: String)
-
-  def e1 = {
+  "Parse Redshift storage target configuration" in {
     val config = """
-      |{
-      |    "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
-      |    "data": {
-      |        "name": "AWS Redshift enriched events storage",
-      |        "id": "11112233-dddd-4845-a7e6-8fdc88d599d0",
-      |        "host": "example.host",
-      |        "database": "ADD HERE",
-      |        "port": 5439,
-      |        "jdbc": { "ssl": true },
-      |        "processingManifest": null,
-      |        "sshTunnel": null,
-      |        "username": "ADD HERE",
-      |        "password": "ADD HERE",
-      |        "roleArn": "arn:aws:iam::123456789876:role/RedshiftLoadRole",
-      |        "schema": "atomic",
-      |        "maxError": 1,
-      |        "compRows": 20000,
-      |        "purpose": "ENRICHED_EVENTS"
-      |    }
-      |}
+      |{
+      |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
+      |  "data": {
+      |    "name": "AWS Redshift enriched events storage",
+      |    "id": "11112233-dddd-4845-a7e6-8fdc88d599d0",
+      |    "host": "example.host",
+      |    "database": "ADD HERE",
+      |    "port": 5439,
+      |    "jdbc": { "ssl": true },
+      |    "processingManifest": null,
+      |    "sshTunnel": null,
+      |    "username": "ADD HERE",
+      |    "password": "ADD HERE",
+      |    "roleArn": "arn:aws:iam::123456789876:role/RedshiftLoadRole",
+      |    "schema": "atomic",
+      |    "maxError": 1,
+      |    "compRows": 20000,
+      |    "purpose": "ENRICHED_EVENTS"
+      |  }
+      |}
     """.stripMargin
 
     val expected = StorageTarget.RedshiftConfig(
@@ -106,17 +61,18 @@ class StorageTargetSpec extends Specification { def is = s2"""
       "arn:aws:iam::123456789876:role/RedshiftLoadRole",
       "atomic",
       "ADD HERE",
-      StorageTarget.PlainText("ADD HERE"),
+      StorageTarget.PasswordConfig.PlainText("ADD HERE"),
       1,
       20000,
       None,
       None,
+      None,
       None)
 
     parseWithDefaultResolver(config) must beRight(expected)
   }
 
-  def e2 = {
+  "Parse Redshift storage target (3-0-0) with tunnel" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
@@ -171,17 +127,18 @@ class StorageTargetSpec extends Specification { def is = s2"""
       "arn:aws:iam::123456789876:role/RedshiftLoadRole",
       "atomic",
       "ADD HERE",
-      StorageTarget.PlainText("ADD HERE"),
+      StorageTarget.PasswordConfig.PlainText("ADD HERE"),
       1,
       20000,
       Some(tunnel),
       None,
+      None,
       None)
 
     parseWithDefaultResolver(config) must beRight(expected)
   }
 
-  def e3 = {
+  "Parse Redshift storage target (3-0-0) with encrypted password" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
@@ -219,17 +176,18 @@ class StorageTargetSpec extends Specification { def is = s2"""
       "arn:aws:iam::123456789876:role/RedshiftLoadRole",
       "atomic",
       "ADD HERE",
-      StorageTarget.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
+      StorageTarget.PasswordConfig.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
       1,
       20000,
       None,
       None,
+      None,
       None)
 
     parseWithDefaultResolver(config) must beRight(expected)
   }
 
-  def e4 = {
+  "Fail to parse old Redshift storage target (3-0-0) with encrypted password" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
@@ -257,7 +215,7 @@ class StorageTargetSpec extends Specification { def is = s2"""
     parseWithDefaultResolver(config) must beLeft
   }
 
-  def e5 = {
+  "Parse Redshift storage target (3-0-0) with many JDBC options" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
@@ -315,17 +273,18 @@ class StorageTargetSpec extends Specification { def is = s2"""
       "arn:aws:iam::123456789876:role/RedshiftLoadRole",
       "atomic",
       "ADD HERE",
-      StorageTarget.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
+      StorageTarget.PasswordConfig.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
       1,
       20000,
       None,
       None,
+      None,
       None)
 
     parseWithDefaultResolver(config) must beRight(expected)
   }
 
-  def e6 = {
+  "Fail to parse Redshift storage target (3-0-0) with wrong JDBC value" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/3-0-0",
@@ -359,9 +318,9 @@ class StorageTargetSpec extends Specification { def is = s2"""
       case NonEmptyList(StorageTarget.ParseError(message), Nil) => message must contain("$.jdbc.sslMode: does not have a value in the enumeration [verify-ca, verify-full]")
       case _ => ko("Not a DecodingError")
     }
-  }  
+  }
 
-  def e7 = {
+  "Parse Redshift storage target (4-0-0) with tabular blacklist" in {
     val config = """
       |{
       |  "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/4-0-0",
@@ -403,7 +362,7 @@ class StorageTargetSpec extends Specification { def is = s2"""
       "arn:aws:iam::123456789876:role/RedshiftLoadRole",
       "atomic",
       "ADD HERE",
-      StorageTarget.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
+      StorageTarget.PasswordConfig.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
       1,
       20000,
       None,
@@ -411,13 +370,106 @@ class StorageTargetSpec extends Specification { def is = s2"""
         SchemaCriterion("com.acme", "event", "jsonschema", Some(1), None, None),
         SchemaCriterion("com.acme", "context", "jsonschema", Some(2), None, None)
       )),
+      None,
       None
     )
 
     parseWithDefaultResolver(config) must beRight(expected)
   }
+
+  "Parse Redshift storage target (4-0-1) with Sentry and SQS message queue" in {
+    val config =
+      """
+      {
+        "schema": "iglu:com.snowplowanalytics.snowplow.storage/redshift_config/jsonschema/4-0-1",
+        "data": {
+          "name": "AWS Redshift enriched events storage",
+          "id": "11112233-dddd-4845-a7e6-8fdc88d599d0",
+          "host": "192.168.1.12",
+          "database": "ADD HERE",
+          "port": 5439,
+          "jdbc": {},
+          "processingManifest": null,
+          "sshTunnel": null,
+          "username": "ADD HERE",
+          "password": {
+            "ec2ParameterStore": {
+              "parameterName": "snowplow.rdbloader.redshift.password"
+            }
+          },
+          "roleArn": "arn:aws:iam::123456789876:role/RedshiftLoadRole",
+          "schema": "atomic",
+          "maxError": 1,
+          "compRows": 20000,
+          "blacklistTabular": [],
+          "purpose": "ENRICHED_EVENTS",
+          "messageQueue": "message-queue",
+          "sentryDsn": "http://sentry.com/foo"
+        }
+      }"""
+
+    val expected = StorageTarget.RedshiftConfig(
+      targetId,
+      "AWS Redshift enriched events storage",
+      "192.168.1.12",
+      "ADD HERE",
+      5439,
+      StorageTarget.RedshiftJdbc.empty,
+      "arn:aws:iam::123456789876:role/RedshiftLoadRole",
+      "atomic",
+      "ADD HERE",
+      StorageTarget.PasswordConfig.EncryptedKey(StorageTarget.EncryptedConfig(StorageTarget.ParameterStoreConfig("snowplow.rdbloader.redshift.password"))),
+      1,
+      20000,
+      None,
+      Some(List()),
+      Some("message-queue"),
+      Some(URI.create("http://sentry.com/foo"))
+    )
+
+    parseWithDefaultResolver(config) must beRight(expected)
+  }
 }
 
 object StorageTargetSpec {
   val enableSsl = StorageTarget.RedshiftJdbc.empty.copy(ssl = Some(true))
+
+  private val targetId = UUID.fromString("11112233-dddd-4845-a7e6-8fdc88d599d0")
+
+  private val IgluCentral = "https://raw.githubusercontent.com/snowplow/iglu-central/feature/redshift-401"
+
+  private val resolverConfig =
+    json"""
+      {
+        "schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-0-2",
+        "data": {
+          "cacheSize": 500,
+          "repositories": [
+            {
+              "name": "Iglu Central",
+              "priority": 1,
+              "vendorPrefixes": [ "com.snowplowanalytics" ],
+              "connection": {
+                "http": {
+                  "uri": $IgluCentral
+                }
+              }
+            },
+            {
+              "name": "Embedded Test",
+              "priority": 0,
+              "vendorPrefixes": [ "com.snowplowanalytics" ],
+              "connection": {
+                "embedded": {
+                  "path": "/embed"
+                }
+              }
+            }
+          ]
+        }
+      }
+    """
+
+  private val resolver = Client.parseDefault[Id](resolverConfig).value.fold(throw _, identity)
+  private val parseWithDefaultResolver = StorageTarget.parseTarget(resolver, _: String)
 }
diff --git a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/Main.scala b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/Main.scala
index e53cd3dc1..694f6c05b 100644
--- a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/Main.scala
+++ b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/Main.scala
@@ -16,16 +16,18 @@ import cats.Monad
 import cats.data.Validated._
 import cats.implicits._
 
-import cats.effect.{ExitCode, IO, IOApp}
+import cats.effect.{IOApp, IO, ExitCode}
 
 import fs2.Stream
 
 import com.snowplowanalytics.snowplow.rdbloader.common.S3
-import com.snowplowanalytics.snowplow.rdbloader.dsl.{JDBC, RealWorld, Logging, AWS}
+import com.snowplowanalytics.snowplow.rdbloader.dsl.{Logging, JDBC, RealWorld, AWS}
 import com.snowplowanalytics.snowplow.rdbloader.config.CliConfig
 import com.snowplowanalytics.snowplow.rdbloader.loaders.Common.{discover, load}
 import com.snowplowanalytics.snowplow.rdbloader.utils.SSH
 
+import io.sentry.Sentry
+
 object Main extends IOApp {
   /**
    * If arguments or config is invalid exit with 1
@@ -45,6 +47,7 @@ object Main extends IOApp {
       .attempt
       .map {
         case Left(e) =>
+          Sentry.captureException(e)
           e.printStackTrace(System.out)
           (LoaderError.LoaderLocalError(e.getMessage): LoaderError).asLeft
         case Right(e) => e
diff --git a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/JDBC.scala b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/JDBC.scala
index e12b733b0..e2d8d688b 100644
--- a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/JDBC.scala
+++ b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/JDBC.scala
@@ -75,9 +75,9 @@
    */
   def getConnection[F[_]: Sync: Timer: AWS](target: StorageTarget): F[Connection] = {
     val password: F[String] = target.password match {
-      case StorageTarget.PlainText(text) =>
+      case StorageTarget.PasswordConfig.PlainText(text) =>
         Sync[F].pure(text)
-      case StorageTarget.EncryptedKey(StorageTarget.EncryptedConfig(key)) =>
+      case StorageTarget.PasswordConfig.EncryptedKey(StorageTarget.EncryptedConfig(key)) =>
         AWS[F].getEc2Property(key.parameterName).map(b => new String(b))
     }
 
diff --git a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/Logging.scala b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/Logging.scala
index 8866d61ef..824fac7af 100644
--- a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/Logging.scala
+++ b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/Logging.scala
@@ -12,33 +12,31 @@
  */
 package com.snowplowanalytics.snowplow.rdbloader.dsl
 
-import java.time.{Instant, ZoneId}
+import java.time.{ZoneId, Instant}
 import java.time.format.DateTimeFormatter
 
+import scala.util.control.NonFatal
+
 import org.joda.time.DateTime
 
-import cats.{Monad, Id}
+import cats.{Id, Monad}
 import cats.data.NonEmptyList
-import cats.syntax.option._
-import cats.syntax.apply._
-import cats.syntax.flatMap._
-import cats.syntax.either._
-import cats.syntax.functor._
-import cats.syntax.show._
-import cats.instances.either._
+import cats.implicits._
 import cats.effect.Sync
 import cats.effect.concurrent.Ref
 
 import io.circe.Json
 
-import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, SelfDescribingData}
+import com.snowplowanalytics.iglu.core.{SchemaVer, SelfDescribingData, SchemaKey}
 
 import com.snowplowanalytics.snowplow.scalatracker.emitters.id.RequestProcessor._
-import com.snowplowanalytics.snowplow.scalatracker.{Emitter, Tracker}
-import com.snowplowanalytics.snowplow.scalatracker.emitters.id.{SyncBatchEmitter, SyncEmitter}
+import com.snowplowanalytics.snowplow.scalatracker.{Tracker, Emitter}
+import com.snowplowanalytics.snowplow.scalatracker.emitters.id.{SyncEmitter, SyncBatchEmitter}
 
 import com.snowplowanalytics.snowplow.rdbloader.LoaderError
 import com.snowplowanalytics.snowplow.rdbloader.common.{Common, _}
-import com.snowplowanalytics.snowplow.rdbloader.config.SnowplowConfig.{ Monitoring, TrackerMethod }
+import com.snowplowanalytics.snowplow.rdbloader.config.SnowplowConfig.{TrackerMethod, Monitoring}
+
+import io.sentry.Sentry
 
 trait Logging[F[_]] {
@@ -54,6 +52,9 @@
 
   /** Print message to stdout */
   def print(message: String): F[Unit]
+
+  /** Log an error to Sentry if it's configured */
+  def trackException(e: Throwable): F[Unit]
 }
 
 object Logging {
@@ -105,6 +106,11 @@
         _ <- Sync[F].delay(System.out.println(timestamped)) *> log(timestamped)
       } yield ()
 
+      def trackException(e: Throwable): F[Unit] =
+        Sync[F].delay(Sentry.captureException(e)).void.recover {
+          case NonFatal(_) => ()
+        }
+
       private def log(message: String): F[Unit] =
         messages.update(buf => message :: buf)
 
diff --git a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/RealWorld.scala b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/RealWorld.scala
index 602ce9931..55bc5a76a 100644
--- a/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/RealWorld.scala
+++ b/modules/loader/src/main/scala/com/snowplowanalytics/snowplow/rdbloader/dsl/RealWorld.scala
@@ -12,13 +12,17 @@
 */
 package com.snowplowanalytics.snowplow.rdbloader.dsl
 
+import java.net.URI
+
 import cats.implicits._
-import cats.effect.{Sync, Clock, ConcurrentEffect}
+import cats.effect.{Sync, ConcurrentEffect, Clock}
 import cats.effect.concurrent.Ref
 
 import com.snowplowanalytics.iglu.client.Client
 
+import io.sentry.{Sentry, SentryOptions}
+
 import com.snowplowanalytics.snowplow.rdbloader.common.S3
 import com.snowplowanalytics.snowplow.rdbloader.config.CliConfig
 
@@ -35,6 +39,7 @@ class RealWorld[F[_]](cache: Cache[F], logging: Logging[F], iglu: Iglu[F], aws:
 object RealWorld {
   def initialize[F[_] : ConcurrentEffect: Clock](config: CliConfig): F[RealWorld[F]] =
     for {
+      _ <- initSentry[F](config.target.sentryDsn)
       cacheMap <- Ref.of[F, Map[String, Option[S3.Key]]](Map.empty)
       messages <- Ref.of[F, List[String]](List.empty[String])
       tracker <- Logging.initializeTracking[F](config.configYaml.monitoring)
@@ -50,4 +55,14 @@ object RealWorld {
       iglu = Iglu.igluInterpreter[F](igluClient)
       aws = AWS.s3Interpreter[F](amazonS3)
     } yield new RealWorld[F](cache, logging, iglu, aws)
+
+  def initSentry[F[_]: Sync](dsn: Option[URI]): F[Unit] =
+    dsn match {
+      case Some(uri) =>
+        val options = new SentryOptions()
+        options.setDsn(uri.toString)
+        Sync[F].delay(Sentry.init(options))
+      case None =>
+        Sync[F].unit
+    }
 }
\ No newline at end of file
diff --git a/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/SpecHelpers.scala b/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/SpecHelpers.scala
index 977d6ba5a..a7f6c9cfe 100644
--- a/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/SpecHelpers.scala
+++ b/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/SpecHelpers.scala
@@ -80,11 +80,12 @@ object SpecHelpers {
     "arn:aws:iam::123456789876:role/RedshiftLoadRole",
     "atomic",
     "admin",
-    StorageTarget.PlainText("Supersecret1"),
+    StorageTarget.PasswordConfig.PlainText("Supersecret1"),
     1,
     20000,
     None,
     None,
+    None,
     None)
 
   val validTargetWithManifest = StorageTarget.RedshiftConfig(
@@ -97,11 +98,12 @@ object SpecHelpers {
     "arn:aws:iam::123456789876:role/RedshiftLoadRole",
     "atomic",
     "admin",
-    StorageTarget.PlainText("Supersecret1"),
+    StorageTarget.PasswordConfig.PlainText("Supersecret1"),
     1,
     20000,
     None,
     None,
+    None,
     None
   )
 
diff --git a/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/TestInterpreter.scala b/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/TestInterpreter.scala
index 697f46bf6..697a0989b 100644
--- a/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/TestInterpreter.scala
+++ b/modules/loader/src/test/scala/com/snowplowanalytics/snowplow/rdbloader/TestInterpreter.scala
@@ -129,6 +129,8 @@ object TestInterpreter {
           Test.pure(key.asRight)
      def print(message: String): Test[Unit] =
        results.print(message)
+      def trackException(e: Throwable): Test[Unit] =
+        results.print(s"EXCEPTION ${e.getMessage}")
     }
 
   def stateIgluInterpreter: Iglu[Test] = new Iglu[Test] {
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 60224de5a..ba20d6e9b 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -40,6 +40,7 @@ object Dependencies {
     val redshift         = "1.2.51.1078"
     val aws              = "1.11.916"
     val jSch             = "0.1.55"
+    val sentry           = "3.2.0"
 
     // Scala (test only)
     val specs2           = "4.10.5"
@@ -85,6 +86,7 @@ object Dependencies {
   val s3               = "com.amazonaws" % "aws-java-sdk-s3"       % V.aws
   val ssm              = "com.amazonaws" % "aws-java-sdk-ssm"      % V.aws
   val jSch             = "com.jcraft"    % "jsch"                  % V.jSch
+  val sentry           = "io.sentry"     % "sentry"                % V.sentry
 
   // Java (Shredder)
   val dynamodb         = "com.amazonaws" % "aws-java-sdk-dynamodb" % V.aws