diff --git a/.dwollaci.yml b/.dwollaci.yml
index 8723180..da0bc72 100644
--- a/.dwollaci.yml
+++ b/.dwollaci.yml
@@ -2,7 +2,7 @@ stages:
   build:
     nodeLabel: sbt
     steps:
-      - sbt universal:packageBin
+      - sbt Universal/packageBin
     filesToStash:
       - '**'
   publish:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1854251..d26e441 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,49 +17,45 @@ on:
 env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+concurrency:
+  group: ${{ github.workflow }} @ ${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   build:
-    name: Build and Test
+    name: Test
     strategy:
       matrix:
-        os: [ubuntu-latest]
-        scala: [2.13.8]
-        java: [temurin@8, temurin@11]
+        os: [ubuntu-22.04]
+        scala: [3]
+        java: [temurin@17]
     runs-on: ${{ matrix.os }}
+    timeout-minutes: 60
     steps:
       - name: Checkout current branch (full)
-        uses: actions/checkout@v2
+        uses: actions/checkout@v5
        with:
           fetch-depth: 0

-      - name: Setup Java (temurin@8)
-        if: matrix.java == 'temurin@8'
-        uses: actions/setup-java@v2
-        with:
-          distribution: temurin
-          java-version: 8
+      - name: Setup sbt
+        uses: sbt/setup-sbt@v1

-      - name: Setup Java (temurin@11)
-        if: matrix.java == 'temurin@11'
-        uses: actions/setup-java@v2
+      - name: Setup Java (temurin@17)
+        id: setup-java-temurin-17
+        if: matrix.java == 'temurin@17'
+        uses: actions/setup-java@v5
        with:
           distribution: temurin
-          java-version: 11
+          java-version: 17
+          cache: sbt

-      - name: Cache sbt
-        uses: actions/cache@v2
-        with:
-          path: |
-            ~/.sbt
-            ~/.ivy2/cache
-            ~/.coursier/cache/v1
-            ~/.cache/coursier/v1
-            ~/AppData/Local/Coursier/Cache/v1
-            ~/Library/Caches/Coursier/v1
-          key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
+      - name: sbt update
+        if: matrix.java == 'temurin@17' && steps.setup-java-temurin-17.outputs.cache-hit == 'false'
+        run: sbt +update

       - name: Check that workflows are up to date
-        run: sbt ++${{ matrix.scala }} githubWorkflowCheck
+        run: sbt githubWorkflowCheck

       - name: Build project
-        run: sbt ++${{ matrix.scala }} test
+        run: sbt '++ ${{ matrix.scala }}' test
diff --git a/.mergify.yml b/.mergify.yml
index 91ee2b5..8cc6f33 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -1,29 +1,23 @@
-queue_rules:
-  - name: default
-    conditions:
-      - status-success=Build and Test (ubuntu-latest, 2.13.8, temurin@8)
-      - status-success=Build and Test (ubuntu-latest, 2.13.8, temurin@11)
+# This file was automatically generated by sbt-typelevel-mergify using the
+# mergifyGenerate task. You should add and commit this file to
+# your git repository. It goes without saying that you shouldn't edit
+# this file by hand! Instead, if you wish to make changes, you should
+# change your sbt build configuration to revise the mergify configuration
+# to meet your needs, then regenerate this file.

 pull_request_rules:
-  - name: assign and label scala-steward's PRs
-    conditions:
-      - author=dwolla-oss-scala-steward[bot]
-    actions:
-      label:
-        add: [dependency-update]
-  - name: automatic update pull requests
-    conditions:
-      - author=dwolla-oss-scala-steward[bot]
-      - -conflict # skip PRs with conflicts
-      - -draft # filter-out GH draft PRs
-    actions:
-      update:
-  - name: merge scala-steward's PRs
-    conditions:
-      - author=dwolla-oss-scala-steward[bot]
-      - status-success=Build and Test (ubuntu-latest, 2.13.8, temurin@8)
-      - status-success=Build and Test (ubuntu-latest, 2.13.8, temurin@11)
-    actions:
-      queue:
-        method: squash
-        name: default
+- name: merge scala-steward's PRs
+  conditions:
+  - author=scala-steward
+  - body~=labels:.*early-semver-patch
+  - status-success=Test (ubuntu-22.04, 3, temurin@17)
+  actions:
+    merge: {}
+- name: Label smithy PRs
+  conditions:
+  - files~=^smithy/
+  actions:
+    label:
+      add:
+      - smithy
+      remove: []
diff --git a/build.sbt b/build.sbt
index ab60c95..3639f0a 100644
--- a/build.sbt
+++ b/build.sbt
@@ -2,7 +2,7 @@ ThisBuild / organization := "com.dwolla"
 ThisBuild / description := "CloudFormation custom resource to initialize a PostgreSQL database with a new user"
 ThisBuild / homepage := Some(url("https://github.com/Dwolla/postgresql-init-custom-resource"))
 ThisBuild / licenses += ("MIT", url("https://opensource.org/licenses/MIT"))
-ThisBuild / scalaVersion := "2.13.8"
+ThisBuild / scalaVersion := "3.7.4"
 ThisBuild / developers := List(
   Developer(
     "bpholt",
@@ -12,52 +12,67 @@ ThisBuild / developers := List(
   ),
 )
 ThisBuild / startYear := Option(2021)
-ThisBuild / libraryDependencies ++= Seq(
-  compilerPlugin("org.typelevel" %% "kind-projector" % "0.13.2" cross CrossVersion.full),
-  compilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1"),
-)
+ThisBuild / resolvers += Resolver.sonatypeCentralSnapshots
+
+lazy val smithy = (project in file("smithy"))
+  .enablePlugins(Smithy4sCodegenPlugin)
+  .settings(
+    libraryDependencies ++= Seq(
+      "com.dwolla" %% "natchez-smithy4s" % "0.1.1",
+      "com.disneystreaming.smithy4s" %% "smithy4s-core" % smithy4sVersion.value,
+      "com.disneystreaming.smithy4s" %% "smithy4s-http4s" % smithy4sVersion.value,
+      "com.disneystreaming.smithy4s" %% "smithy4s-cats" % smithy4sVersion.value,
+      "com.disneystreaming.smithy4s" %% "smithy4s-json" % smithy4sVersion.value,
+      "com.disneystreaming.smithy4s" %% "smithy4s-aws-http4s" % smithy4sVersion.value
+    ),
+    smithy4sAwsSpecs ++= Seq(AWS.secretsManager)
+  )

-ThisBuild / githubWorkflowJavaVersions := Seq(JavaSpec.temurin("8"), JavaSpec.temurin("11"))
+ThisBuild / githubWorkflowJavaVersions := Seq(JavaSpec.temurin("17"))
 ThisBuild / githubWorkflowTargetTags ++= Seq("v*")
 ThisBuild / githubWorkflowPublishTargetBranches := Seq.empty
 ThisBuild / githubWorkflowPublish := Seq.empty

-lazy val munitV = "0.7.29"
-lazy val circeV = "0.14.2"
+lazy val circeV = "0.14.15"

 lazy val `postgresql-init-core` = (project in file("."))
   .settings(
     maintainer := developers.value.head.email,
     topLevelDirectory := None,
     libraryDependencies ++= {
-      val natchezVersion = "0.1.6"
-      val feralVersion = "0.1.0-M13"
+      val natchezVersion = "0.3.8"
+      val feralVersion = "0.3.1-79-260ee83-SNAPSHOT"
       Seq(
         "org.typelevel" %% "feral-lambda-cloudformation-custom-resource" % feralVersion,
         "org.tpolecat" %% "natchez-xray" % natchezVersion,
-        "org.tpolecat" %% "natchez-http4s" % "0.3.2",
-        "org.typelevel" %% "cats-tagless-macros" % "0.14.0",
-        "org.http4s" %% "http4s-ember-client" % "0.23.13",
+        "org.tpolecat" %% "natchez-http4s" % "0.6.1",
+        "org.typelevel" %% "cats-tagless-core" % "0.16.3-85-591274f-SNAPSHOT", // see https://github.com/typelevel/cats-tagless/issues/652
+        "org.http4s" %% "http4s-ember-client" % "0.23.32",
         "io.circe" %% "circe-parser" % circeV,
         "io.circe" %% "circe-generic" % circeV,
-        "io.circe" %% "circe-refined" % circeV,
-        "io.monix" %% "newtypes-core" % "0.2.3",
-        "io.monix" %% "newtypes-circe-v0-14" % "0.2.3",
-        "org.tpolecat" %% "skunk-core" % "0.3.1",
-        "org.typelevel" %% "log4cats-slf4j" % "2.3.2",
-        "com.amazonaws" % "aws-lambda-java-log4j2" % "1.5.1",
-        "org.apache.logging.log4j" % "log4j-slf4j-impl" % "2.18.0",
-        "com.chuusai" %% "shapeless" % "2.3.9",
-        "com.dwolla" %% "fs2-aws-java-sdk2" % "3.0.0-RC1",
-        "software.amazon.awssdk" % "secretsmanager" % "2.17.229",
-        "org.scalameta" %% "munit" % munitV % Test,
-        "org.scalameta" %% "munit-scalacheck" % munitV % Test,
+        "io.circe" %% "circe-literal" % circeV,
+        "io.circe" %% "circe-refined" % "0.14.9",
+        "io.monix" %% "newtypes-core" % "0.3.0",
+        "io.monix" %% "newtypes-circe-v0-14" % "0.3.0",
+        "org.tpolecat" %% "skunk-core" % "0.6.4",
+        "org.typelevel" %% "log4cats-slf4j" % "2.7.1",
+        "com.amazonaws" % "aws-lambda-java-log4j2" % "1.6.0",
+        "org.apache.logging.log4j" % "log4j-slf4j-impl" % "2.25.2",
+        "com.dwolla" %% "natchez-tagless" % "0.2.6-131-d6a1c7c-SNAPSHOT",
+        "org.typelevel" %% "mouse" % "1.4.0",
+        "com.comcast" %% "ip4s-core" % "3.7.0",
+        "org.scalameta" %% "munit" % "1.2.1" % Test,
+        "org.scalameta" %% "munit-scalacheck" % "1.2.0" % Test,
         "io.circe" %% "circe-literal" % circeV % Test,
+        "com.dwolla" %% "dwolla-otel-natchez" % "0.2.8" % Test,
       )
     },
+    buildInfoPackage := "com.dwolla.buildinfo.postgres.init",
+    buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion),
   )
-  .enablePlugins(UniversalPlugin, JavaAppPackaging)
+  .dependsOn(smithy)
+  .enablePlugins(UniversalPlugin, JavaAppPackaging, BuildInfoPlugin)

 lazy val serverlessDeployCommand = settingKey[Seq[String]]("serverless command to deploy the application")
 serverlessDeployCommand := "serverless deploy --verbose".split(' ').toSeq
diff --git a/project/build.properties b/project/build.properties
index d738b85..a360cca 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version = 1.7.1
+sbt.version = 1.11.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
index d112a03..659f30e 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,4 +1,5 @@
-addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.3.3")
-addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10")
-addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.14.2")
-addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.9")
+addSbtPlugin("org.typelevel" % "sbt-typelevel-settings" % "0.8.2")
+addSbtPlugin("org.typelevel" % "sbt-typelevel-mergify" % "0.8.2")
+addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.16")
+addSbtPlugin("com.disneystreaming.smithy4s" % "smithy4s-sbt-codegen" % "0.18.43")
+addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1")
diff --git a/serverless.yml b/serverless.yml
index 99da87f..a0648dc 100644
--- a/serverless.yml
+++ b/serverless.yml
@@ -4,7 +4,7 @@
 variablesResolutionMode: 20210326
 provider:
   name: aws
-  runtime: java11
+  runtime: java17
   memorySize: 1024
   timeout: 60
   region: us-west-2
diff --git a/smithy/src/main/smithy/secrets-manager.smithy b/smithy/src/main/smithy/secrets-manager.smithy
new file mode 100644
index 0000000..ee1af66
--- /dev/null
+++ b/smithy/src/main/smithy/secrets-manager.smithy
@@ -0,0 +1,19 @@
+$version: "2.0"
+
+namespace com.dwolla.aws.secretsManager
+
+use smithy4s.meta#only
+use com.dwolla.tracing.smithy#traceable
+
+apply com.amazonaws.secretsmanager#GetSecretValue @only
+
+apply com.amazonaws.secretsmanager#CreatedDateType @traceable
+apply com.amazonaws.secretsmanager#ErrorMessage @traceable
+apply com.amazonaws.secretsmanager#SecretARNType @traceable
+apply com.amazonaws.secretsmanager#SecretBinaryType @traceable
+apply com.amazonaws.secretsmanager#SecretIdType @traceable
+apply com.amazonaws.secretsmanager#SecretNameType @traceable
+apply com.amazonaws.secretsmanager#SecretStringType @traceable(redacted: "redacted SecretString")
+apply com.amazonaws.secretsmanager#SecretVersionIdType @traceable
+apply com.amazonaws.secretsmanager#SecretVersionStageType @traceable
+apply com.amazonaws.secretsmanager#SecretVersionStagesType @traceable
diff --git a/src/main/scala/com/dwolla/postgres/init/ExtractRequestProperties.scala b/src/main/scala/com/dwolla/postgres/init/ExtractRequestProperties.scala
index 4c9f666..e00e1d5 100644
--- a/src/main/scala/com/dwolla/postgres/init/ExtractRequestProperties.scala
+++ b/src/main/scala/com/dwolla/postgres/init/ExtractRequestProperties.scala
@@ -1,27 +1,44 @@
 package com.dwolla.postgres.init

-import cats.syntax.all._
+import cats.syntax.all.*
+import com.amazonaws.secretsmanager.SecretIdType
+import com.comcast.ip4s.*
 import io.circe.generic.semiauto.deriveDecoder
-import io.circe.refined._
-import io.circe.{Decoder, HCursor}
+import io.circe.refined.*
+import io.circe.{Decoder, HCursor, Json}
+import io.circe.literal.*
+import io.circe.syntax.EncoderOps
+import natchez.*

 case class DatabaseMetadata(host: Host,
                             port: Port,
                             name: Database,
                             username: MasterDatabaseUsername,
                             password: MasterDatabasePassword,
-                            secretIds: List[SecretId],
+                            secretIds: List[SecretIdType],
                            )

 object DatabaseMetadata {
-  implicit val DecodeDatabaseMetadata: Decoder[DatabaseMetadata] = (c: HCursor) =>
-    (c.downField("Host").as[Host],
-      c.downField("Port").as[Port],
-      c.downField("DatabaseName").as[Database],
-      c.downField("MasterDatabaseUsername").as[MasterDatabaseUsername],
-      c.downField("MasterDatabasePassword").as[MasterDatabasePassword],
-      c.downField("UserConnectionSecrets").as[List[SecretId]],
-      ).mapN(DatabaseMetadata.apply)
+  given Decoder[DatabaseMetadata] = Decoder.accumulatingInstance { (c: HCursor) =>
+    (c.downField("Host").asAcc[Host],
+      c.downField("Port").asAcc[Port],
+      c.downField("DatabaseName").asAcc[Database],
+      c.downField("MasterDatabaseUsername").asAcc[MasterDatabaseUsername],
+      c.downField("MasterDatabasePassword").asAcc[MasterDatabasePassword],
+      c.downField("UserConnectionSecrets").asAcc[List[String]].nested.map(SecretIdType(_)).value,
+      ).mapN(DatabaseMetadata.apply)
+  }
+
+  given TraceableValue[DatabaseMetadata] = TraceableValue[String].contramap { dm =>
+    json"""{
+      "Host": ${dm.host.toTraceValue},
+      "Port": ${dm.port.toTraceValue},
+      "DatabaseName": ${dm.name.toTraceValue},
+      "MasterDatabaseUsername": ${dm.username.toTraceValue},
+      "MasterDatabasePassword": ${dm.password.toTraceValue},
+      "UserConnectionSecrets": ${dm.secretIds.map(_.toTraceValue)}
+    }""".noSpaces
+  }
 }

 case class UserConnectionInfo(database: Database,
@@ -32,5 +49,36 @@ case class UserConnectionInfo(database: Database,
                              )

 object UserConnectionInfo {
-  implicit val UserConnectionInfoDecoder: Decoder[UserConnectionInfo] = deriveDecoder[UserConnectionInfo]
+  given Decoder[UserConnectionInfo] = deriveDecoder[UserConnectionInfo]
+
+  given
TraceableValue[UserConnectionInfo] = TraceableValue[String].contramap { uci => + json"""{ + "host": ${uci.host.toTraceValue}, + "port": ${uci.port.toTraceValue}, + "database": ${uci.database.toTraceValue}, + "user": ${uci.user.toTraceValue}, + "password": ${uci.password.toTraceValue} + }""".noSpaces + } } + +extension [A](a: A) + def toTraceValue(using TraceableValue[A]): Json = TraceableValue[A].toTraceValue(a) match + case TraceValue.StringValue(value) => value.asJson + case TraceValue.BooleanValue(value) => value.asJson + case TraceValue.NumberValue(value) => + value match + case i: java.lang.Byte => Json.fromInt(i.intValue) + case s: java.lang.Short => Json.fromInt(s.intValue) + case i: java.lang.Integer => Json.fromInt(i) + case l: java.lang.Long => Json.fromLong(l) + case f: java.lang.Float => Json.fromFloat(f).getOrElse(Json.Null) + case d: java.lang.Double => Json.fromDouble(d).getOrElse(Json.Null) + case bd: java.math.BigDecimal => + Json.fromBigDecimal(scala.math.BigDecimal(bd)) + case bi: java.math.BigInteger => + Json.fromBigInt(scala.math.BigInt(bi)) + case _ => + // Fallback: try BigDecimal to preserve value if possible + val bd = new java.math.BigDecimal(value.toString) + Json.fromBigDecimal(scala.math.BigDecimal(bd)) diff --git a/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandler.scala b/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandler.scala index 5d06888..faccd28 100644 --- a/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandler.scala +++ b/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandler.scala @@ -1,57 +1,102 @@ package com.dwolla.postgres.init -import cats.data._ -import cats.effect.std.{Console, Random} -import cats.effect.{Trace => _, _} -import cats.tagless.FunctorK.ops.toAllFunctorKOps +import cats.effect.std.{Console, Env, Random} +import cats.effect.syntax.all.* +import cats.effect.{Trace as _, *} +import cats.mtl.Local +import cats.syntax.all.* +import com.amazonaws.secretsmanager.SecretsManager import com.dwolla.postgres.init.aws.SecretsManagerAlg -import com.dwolla.tracing._ -import feral.lambda.cloudformation._ -import feral.lambda.{INothing, IOLambda, KernelSource, LambdaEnv, TracedHandler} -import natchez._ +import com.dwolla.postgres.init.repositories.{DatabaseRepository, RoleRepository, UserRepository} +import feral.lambda.cloudformation.* +import feral.lambda.{AwsTags, INothing, IOLambda, Invocation, KernelSource} +import fs2.compression.Compression +import fs2.io.file.Files +import fs2.io.net.Network +import mouse.all.* +import natchez.* import natchez.http4s.NatchezMiddleware +import natchez.mtl.* import natchez.xray.{XRay, XRayEnvironment} import org.http4s.client.{Client, middleware} import org.http4s.ember.client.EmberClientBuilder import org.typelevel.log4cats.Logger import org.typelevel.log4cats.slf4j.Slf4jLogger +import smithy4s.aws.kernel.AwsRegion +import smithy4s.aws.{AwsClient, AwsEnvironment} +@annotation.experimental class PostgresqlDatabaseInitHandler extends IOLambda[CloudFormationCustomResourceRequest[DatabaseMetadata], INothing] { - private def resources[F[_] : Async : Console]: Resource[F, LambdaEnv[F, CloudFormationCustomResourceRequest[DatabaseMetadata]] => F[Option[INothing]]] = + private def resources[F[_] : {Async, Compression, Console, Env, Files, Network}](entryPointOverride: Option[EntryPoint[F]]) + (using Local[F, Span[F]]): Resource[F, Invocation[F, CloudFormationCustomResourceRequest[DatabaseMetadata]] => F[Option[INothing]]] = for { - 
implicit0(logger: Logger[F]) <- Resource.eval(Slf4jLogger.create[F]) - implicit0(random: Random[F]) <- Resource.eval(Random.scalaUtilRandom[F]) + given Logger[F] <- Resource.eval(Slf4jLogger.create[F]) + given Random[F] <- Resource.eval(Random.scalaUtilRandom[F]) client <- httpClient[F] - entryPoint <- XRayEnvironment[Resource[F, *]].daemonAddress.flatMap { - case Some(addr) => XRay.entryPoint(addr) - case None => XRay.entryPoint[F]() + entryPoint <- entryPointOverride.toOptionT[Resource[F, *]].getOrElseF { + XRayEnvironment[F].daemonAddress.toResource.flatMap { + case Some(addr) => XRay.entryPoint(addr) + case None => XRay.entryPoint[F]() + } + } + region <- Env[F].get("AWS_REGION").liftEitherT(new RuntimeException("missing AWS_REGION environment variable")).map(AwsRegion(_)).rethrowT.toResource + awsEnv <- AwsEnvironment.default(client, region) + secretsManager <- AwsClient(SecretsManager, awsEnv).map(SecretsManagerAlg[F](_)) + } yield { implicit env: Invocation[F, CloudFormationCustomResourceRequest[DatabaseMetadata]] => + TracedHandler(entryPoint) { _ => + CloudFormationCustomResource(client, PostgresqlDatabaseInitHandlerImpl( + secretsManager, + DatabaseRepository[F], + RoleRepository[F], + UserRepository[F], + )) } - secretsManager <- SecretsManagerAlg.resource[F].map(_.mapK(Kleisli.liftK[F, Span[F]]).withTracing) - } yield { implicit env: LambdaEnv[F, CloudFormationCustomResourceRequest[DatabaseMetadata]] => - TracedHandler(entryPoint, Kleisli { (span: Span[F]) => - CloudFormationCustomResource(tracedHttpClient(client, span), PostgresqlDatabaseInitHandlerImpl(secretsManager)).run(span) - }) } - override def handler: Resource[IO, LambdaEnv[IO, CloudFormationCustomResourceRequest[DatabaseMetadata]] => IO[Option[INothing]]] = - resources[IO] + def handler(entryPoint: EntryPoint[IO]): Resource[IO, Invocation[IO, CloudFormationCustomResourceRequest[DatabaseMetadata]] => IO[Option[INothing]]] = + IO.local(Span.noop[IO]).toResource + .flatMap(implicit l => resources[IO](entryPoint.some)) - private implicit def kleisliLogger[F[_] : Logger, A]: Logger[Kleisli[F, A, *]] = Logger[F].mapK(Kleisli.liftK) + override def handler: Resource[IO, Invocation[IO, CloudFormationCustomResourceRequest[DatabaseMetadata]] => IO[Option[INothing]]] = + IO.local(Span.noop[IO]).toResource + .flatMap(implicit l => resources[IO](None)) /** - * The XRay kernel comes from environment variables, so we don't need to extract anything from the incoming event + * The X-Ray kernel comes from environment variables, so we don't need to extract anything from the incoming event. + * The kernel will be sourced from the environment/system properties if useEnvironmentFallback is true when + * initializing the X-Ray entrypoint. 
*/ - private implicit def kernelSource[Event]: KernelSource[Event] = KernelSource.emptyKernelSource + private given [Event]: KernelSource[Event] = KernelSource.emptyKernelSource - private def httpClient[F[_] : Async]: Resource[F, Client[F]] = + private def httpClient[F[_] : {Async, Network, Trace}]: Resource[F, Client[F]] = EmberClientBuilder .default[F] .build .map(middleware.Logger[F](logHeaders = true, logBody = true)) + .map(NatchezMiddleware.client(_)) - private def tracedHttpClient[F[_] : MonadCancelThrow](client: Client[F], span: Span[F]): Client[Kleisli[F, Span[F], *]] = - NatchezMiddleware.client(client.translate(Kleisli.liftK[F, Span[F]])(Kleisli.applyK(span))) +} +// TODO replace with https://github.com/typelevel/feral/pull/591 once it's merged +object TracedHandler { + def apply[F[_] : MonadCancelThrow, Event, Result](entryPoint: EntryPoint[F]) + (handler: Trace[F] => F[Option[Result]]) + (using Invocation[F, Event], + KernelSource[Event], + Local[F, Span[F]]): F[Option[Result]] = + for { + event <- Invocation[F, Event].event + context <- Invocation[F, Event].context + kernel = KernelSource[Event].extract(event) + result <- entryPoint.continueOrElseRoot(context.functionName, kernel).use { + Local[F, Span[F]].scope { + Trace[F].put( + AwsTags.arn(context.invokedFunctionArn), + AwsTags.requestId(context.awsRequestId) + ) >> handler(Trace[F]) + } + } + } yield result } diff --git a/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandlerImpl.scala b/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandlerImpl.scala index 4541744..4bf5c30 100644 --- a/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandlerImpl.scala +++ b/src/main/scala/com/dwolla/postgres/init/PostgresqlDatabaseInitHandlerImpl.scala @@ -1,83 +1,85 @@ package com.dwolla.postgres.init -import cats._ +import cats.* import cats.effect.std.Console -import cats.effect.{Trace => _, _} -import cats.syntax.all._ -import com.dwolla.postgres.init.PostgresqlDatabaseInitHandlerImpl.databaseAsPhysicalResourceId -import com.dwolla.postgres.init.aws.{ResourceNotFoundException, SecretsManagerAlg} -import com.dwolla.postgres.init.repositories.CreateSkunkSession._ -import com.dwolla.postgres.init.repositories._ +import cats.effect.{Trace as _, *} +import cats.syntax.all.* +import cats.tagless.aop.Aspect +import cats.tagless.{Derive, Trivial} +import com.amazonaws.secretsmanager.{ResourceNotFoundException, SecretIdType} +import com.dwolla.postgres.init.aws.SecretsManagerAlg +import com.dwolla.postgres.init.repositories.* +import com.dwolla.postgres.init.repositories.CreateSkunkSession.* +import com.dwolla.tracing.syntax.* import feral.lambda.INothing -import feral.lambda.cloudformation._ +import feral.lambda.cloudformation.* import fs2.io.net.Network -import natchez._ +import natchez.* import org.typelevel.log4cats.Logger -class PostgresqlDatabaseInitHandlerImpl[F[_] : Concurrent : Trace : Network : Console : Logger](secretsManagerAlg: SecretsManagerAlg[F], - databaseRepository: DatabaseRepository[InSession[F, *]], - roleRepository: RoleRepository[InSession[F, *]], - userRepository: UserRepository[InSession[F, *]], - ) extends CloudFormationCustomResource[F, DatabaseMetadata, INothing] { - override def createResource(event: DatabaseMetadata): F[HandlerResponse[INothing]] = - handleCreateOrUpdate(event)(createOrUpdate(_, event)).map(HandlerResponse(_, None)) +trait PostgresqlDatabaseInitHandlerImpl[F[_]] extends CloudFormationCustomResource[F, DatabaseMetadata, INothing] - override def 
updateResource(event: DatabaseMetadata, physicalResourceId: PhysicalResourceId): F[HandlerResponse[INothing]] = - handleCreateOrUpdate(event)(createOrUpdate(_, event)).map(HandlerResponse(_, None)) +@annotation.experimental +object PostgresqlDatabaseInitHandlerImpl { + given TraceableValue[feral.lambda.cloudformation.PhysicalResourceId] = TraceableValue[String].contramap(_.value) - override def deleteResource(event: DatabaseMetadata, physicalResourceId: PhysicalResourceId): F[HandlerResponse[INothing]] = - for { - usernames <- getUsernamesFromSecrets(event.secretIds, UserRepository.usernameForDatabase(event.name)) - dbId <- removeUsersFromDatabase(usernames, event.name).inSession(event.host, event.port, event.username, event.password) - } yield HandlerResponse(dbId, None) + given Aspect[PostgresqlDatabaseInitHandlerImpl, TraceableValue, Trivial] = Derive.aspect - private def createOrUpdate(userPasswords: List[UserConnectionInfo], input: DatabaseMetadata): InSession[F, PhysicalResourceId] = - for { - db <- databaseAsPhysicalResourceId[InSession[F, *]](input.name) - _ <- databaseRepository.createDatabase(input) - _ <- roleRepository.createRole(input.name) - _ <- userPasswords.traverse { userPassword => - userRepository.addOrUpdateUser(userPassword) >> roleRepository.addUserToRole(userPassword.user, userPassword.database) - } - } yield db + private[PostgresqlDatabaseInitHandlerImpl] def databaseAsPhysicalResourceId[F[_] : ApplicativeThrow](db: Database): F[PhysicalResourceId] = + PhysicalResourceId(db.value.value).liftTo[F](new RuntimeException("Database name was invalid as Physical Resource ID")) + + def apply[F[_] : {Temporal, Trace, Network, Console, Logger}](secretsManagerAlg: SecretsManagerAlg[F], + databaseRepository: DatabaseRepository[InSession[F, *]], + roleRepository: RoleRepository[InSession[F, *]], + userRepository: UserRepository[InSession[F, *]], + ): PostgresqlDatabaseInitHandlerImpl[F] = new PostgresqlDatabaseInitHandlerImpl[F] { + override def createResource(event: DatabaseMetadata): F[HandlerResponse[INothing]] = + handleCreateOrUpdate(event)(createOrUpdate(_, event)).map(HandlerResponse(_, None)) + + override def updateResource(event: DatabaseMetadata, physicalResourceId: PhysicalResourceId): F[HandlerResponse[INothing]] = + handleCreateOrUpdate(event)(createOrUpdate(_, event)).map(HandlerResponse(_, None)) - private def handleCreateOrUpdate(input: DatabaseMetadata) - (f: List[UserConnectionInfo] => InSession[F, PhysicalResourceId]): F[PhysicalResourceId] = - for { - userPasswords <- input.secretIds.traverse(secretsManagerAlg.getSecretAs[UserConnectionInfo]) - id <- f(userPasswords).inSession(input.host, input.port, input.username, input.password) - } yield id + override def deleteResource(event: DatabaseMetadata, physicalResourceId: PhysicalResourceId): F[HandlerResponse[INothing]] = + for { + usernames <- getUsernamesFromSecrets(event.secretIds, UserRepository.usernameForDatabase(event.name)) + dbId <- removeUsersFromDatabase(usernames, event.name).inSession(event.host, event.port, event.username, event.password) + } yield HandlerResponse(dbId, None) - private def getUsernamesFromSecrets(secretIds: List[SecretId], fallback: Username): F[List[Username]] = - secretIds.traverse { secretId => - secretsManagerAlg.getSecretAs[UserConnectionInfo](secretId) - .map(_.user) - .recoverWith { - case ex: ResourceNotFoundException => - Logger[F].warn(ex)(s"could not retrieve secret ${secretId.value}, falling back to ${fallback.value}") - .as(fallback) + private def 
createOrUpdate(userPasswords: List[UserConnectionInfo], input: DatabaseMetadata): InSession[F, PhysicalResourceId] = + for { + db <- databaseAsPhysicalResourceId[InSession[F, *]](input.name) + _ <- databaseRepository.createDatabase(input) + _ <- roleRepository.createRole(input.name) + _ <- userPasswords.traverse { userPassword => + userRepository.addOrUpdateUser(userPassword) >> roleRepository.addUserToRole(userPassword.user, userPassword.database) } - } + } yield db - private def removeUsersFromDatabase(usernames: List[Username], databaseName: Database): InSession[F, PhysicalResourceId] = - for { - db <- databaseAsPhysicalResourceId[InSession[F, *]](databaseName) - _ <- usernames.traverse(roleRepository.removeUserFromRole(_, databaseName)) - _ <- databaseRepository.removeDatabase(databaseName) - _ <- roleRepository.removeRole(databaseName) - _ <- usernames.traverse(userRepository.removeUser) - } yield db -} + private def handleCreateOrUpdate(input: DatabaseMetadata) + (f: List[UserConnectionInfo] => InSession[F, PhysicalResourceId]): F[PhysicalResourceId] = + for { + userPasswords <- input.secretIds.traverse(secretsManagerAlg.getSecretAs[UserConnectionInfo]) + id <- f(userPasswords).inSession(input.host, input.port, input.username, input.password) + } yield id -object PostgresqlDatabaseInitHandlerImpl { - def apply[F[_] : Temporal : Trace : Network : Console : Logger](secretsManager: SecretsManagerAlg[F]): PostgresqlDatabaseInitHandlerImpl[F] = - new PostgresqlDatabaseInitHandlerImpl( - secretsManager, - DatabaseRepository[F], - RoleRepository[F], - UserRepository[F], - ) + private def getUsernamesFromSecrets(secretIds: List[SecretIdType], fallback: Username): F[List[Username]] = + secretIds.traverse { secretId => + secretsManagerAlg.getSecretAs[UserConnectionInfo](secretId) + .map(_.user) + .recoverWith { + case ex: ResourceNotFoundException => + Logger[F].warn(ex)(s"could not retrieve secret ${secretId.value}, falling back to ${fallback.value}") + .as(fallback) + } + } - private[PostgresqlDatabaseInitHandlerImpl] def databaseAsPhysicalResourceId[F[_] : ApplicativeThrow](db: Database): F[PhysicalResourceId] = - PhysicalResourceId(db.value.value).liftTo[F](new RuntimeException("Database name was invalid as Physical Resource ID")) + private def removeUsersFromDatabase(usernames: List[Username], databaseName: Database): InSession[F, PhysicalResourceId] = + for { + db <- databaseAsPhysicalResourceId[InSession[F, *]](databaseName) + _ <- usernames.traverse(roleRepository.removeUserFromRole(_, databaseName)) + _ <- databaseRepository.removeDatabase(databaseName) + _ <- roleRepository.removeRole(databaseName) + _ <- usernames.traverse(userRepository.removeUser) + } yield db + }.traceWithInputs } diff --git a/src/main/scala/com/dwolla/postgres/init/aws/SecretsManagerAlg.scala b/src/main/scala/com/dwolla/postgres/init/aws/SecretsManagerAlg.scala index 683bc13..9c39e1d 100644 --- a/src/main/scala/com/dwolla/postgres/init/aws/SecretsManagerAlg.scala +++ b/src/main/scala/com/dwolla/postgres/init/aws/SecretsManagerAlg.scala @@ -1,40 +1,100 @@ package com.dwolla.postgres.init package aws -import cats.syntax.all._ -import cats.effect.{Trace => _, _} -import cats.tagless.Derive -import cats.tagless.aop.Instrument -import com.dwolla.fs2aws.AwsEval -import io.circe.{Decoder, parser} +import cats.* +import cats.effect.{Trace as _, *} +import cats.syntax.all.* +import cats.tagless.aop.* +import com.amazonaws.secretsmanager.* +import com.dwolla.tagless.WeaveKnot +import com.dwolla.tracing.syntax.* +import 
io.circe.jawn.JawnParser +import io.circe.{Decoder, Errors} +import natchez.{Trace, TraceableValue} import org.typelevel.log4cats.Logger -import software.amazon.awssdk.services.secretsmanager.SecretsManagerAsyncClient -import software.amazon.awssdk.services.secretsmanager.model._ trait SecretsManagerAlg[F[_]] { - def getSecret(secretId: SecretId): F[String] - def getSecretAs[A : Decoder](secretId: SecretId): F[A] + def getSecret(secretId: SecretIdType): F[Secret] + def getSecretAs[A : Decoder](secretId: SecretIdType): F[A] } object SecretsManagerAlg { - implicit val SecretsManagerAlgInstrumentation: Instrument[SecretsManagerAlg] = Derive.instrument + given Aspect[SecretsManagerAlg, TraceableValue, TraceableValue] = new Aspect[SecretsManagerAlg, TraceableValue, TraceableValue] { + override def weave[F[_]](af: SecretsManagerAlg[F]): SecretsManagerAlg[Aspect.Weave[F, TraceableValue, TraceableValue, *]] = + new SecretsManagerAlg[Aspect.Weave[F, TraceableValue, TraceableValue, *]] { + override def getSecret(secretId: SecretIdType): Aspect.Weave[F, TraceableValue, TraceableValue, Secret] = + Aspect.Weave( + "SecretsManagerAlg", + List(List( + Aspect.Advice.byValue("secretId", secretId), + )), + Aspect.Advice("getSecret", af.getSecret(secretId)) + ) - def resource[F[_] : Async : Logger]: Resource[F, SecretsManagerAlg[F]] = - Resource.fromAutoCloseable(Sync[F].delay(SecretsManagerAsyncClient.builder().build())) - .map(SecretsManagerAlg[F](_)) + override def getSecretAs[A: Decoder](secretId: SecretIdType): Aspect.Weave[F, TraceableValue, TraceableValue, A] = + Aspect.Weave( + "SecretsManagerAlg", + List( + List(Aspect.Advice.byValue("secretId", secretId)), + List(Aspect.Advice.byValue("implicit decoder", Decoder[A].toString)), + ), + Aspect.Advice("getSecretAs", af.getSecretAs[A](secretId))(using TraceableValue[String].contramap[A](_ => "redacted successfully parsed and decoded secret")) + ) + } - def apply[F[_] : Async : Logger](client: SecretsManagerAsyncClient): SecretsManagerAlg[F] = new SecretsManagerAlg[F] { - override def getSecret(secretId: SecretId): F[String] = + override def mapK[F[_], G[_]](af: SecretsManagerAlg[F])(fk: F ~> G): SecretsManagerAlg[G] = + new SecretsManagerAlg[G] { + override def getSecret(secretId: SecretIdType): G[Secret] = fk(af.getSecret(secretId)) + override def getSecretAs[A: Decoder](secretId: SecretIdType): G[A] = fk(af.getSecretAs[A](secretId)) + } + } + + def apply[F[_] : Async : Logger : Trace](client: SecretsManager[F]): SecretsManagerAlg[F] = + WeaveKnot[SecretsManagerAlg, F](apply(client, _))(_.traceWithInputsAndOutputs) + + private def apply[F[_] : Async : Logger](client: SecretsManager[F], + self: Eval[SecretsManagerAlg[F]], + ): SecretsManagerAlg[F] = new SecretsManagerAlg[F] { + private val parser = new JawnParser + + override def getSecret(secretId: SecretIdType): F[Secret] = Logger[F].info(s"retrieving secret id $secretId") >> - AwsEval.eval[F](GetSecretValueRequest.builder().secretId(secretId.value).build())(client.getSecretValue)(_.secretString()) - - override def getSecretAs[A: Decoder](secretId: SecretId): F[A] = - for { - secretString <- getSecret(secretId) - secretJson <- parser.parse(secretString).liftTo[F] - a <- secretJson.as[A].liftTo[F] - } yield a + client.getSecretValue(secretId) + .flatMap { + case GetSecretValueResponse(_, _, _, None, Some(txt), _, _) => + SecretString(txt).pure[F].widen + case GetSecretValueResponse(_, _, _, Some(blob), None, _, _) => + SecretBinary(blob).pure[F].widen + case GetSecretValueResponse(_, _, _, Some(blob), 
Some(_), _, _) => + SecretBinary(blob).pure[F].widen + case _ => + NoSecretInResponseException(secretId).raiseError[F, Secret] + } + + override def getSecretAs[A: Decoder](secretId: SecretIdType): F[A] = + self.value.getSecret(secretId) + .flatMap { + case SecretString(SecretStringType(value)) => parser.parse(value).liftTo[F] + case SecretBinary(SecretBinaryType(value)) => parser.parseByteBuffer(value.asByteBuffer).liftTo[F] + } + .flatMap { + _.asAccumulating[A] + .toEither + .leftMap(Errors(_)) + .liftTo[F] + } + } +} + +sealed trait Secret +case class SecretString(value: SecretStringType) extends Secret +case class SecretBinary(value: SecretBinaryType) extends Secret + +object Secret { + given TraceableValue[Secret] = TraceableValue[String].contramap { + case SecretString(_) => "redacted string secret" + case SecretBinary(_) => "redacted binary secret" } } -case class ResourceNotFoundException(resource: String, cause: Option[Throwable]) extends RuntimeException(resource, cause.orNull) +case class NoSecretInResponseException(resource: SecretIdType) extends RuntimeException(resource.value) diff --git a/src/main/scala/com/dwolla/postgres/init/package.scala b/src/main/scala/com/dwolla/postgres/init/package.scala index e9ee779..8a80f5a 100644 --- a/src/main/scala/com/dwolla/postgres/init/package.scala +++ b/src/main/scala/com/dwolla/postgres/init/package.scala @@ -1,72 +1,55 @@ -package com.dwolla.postgres - -import eu.timepit.refined._ -import eu.timepit.refined.api.Refined -import eu.timepit.refined.string._ -import monix.newtypes.NewtypeWrapped +package com.dwolla.postgres.init + +import cats.syntax.all.* +import com.comcast.ip4s.{Host, Port} +import eu.timepit.refined.* +import eu.timepit.refined.api.* +import eu.timepit.refined.string.* +import io.circe.Decoder +import monix.newtypes.{HasExtractor, NewtypeWrapped} import monix.newtypes.integrations.DerivedCirceCodec -import shapeless.ops.hlist -import shapeless.ops.tuple._ -import shapeless.syntax.std.tuple._ -import shapeless.{HList, LabelledGeneric} - -package object init { - implicit class ApplyAll[P <: Product](p: P) { - def applyAll[A, B, O](a: A) - (implicit - cm: ConstMapper.Aux[P, A, B], - za: ZipApply.Aux[P, B, O], - ): O = - p.zipApply(p.mapConst(a)) - } +import natchez.TraceableValue - implicit class MigrationOps[A](a: A) { - def migrateTo[B](implicit migration: Migration[A, B]): B = - migration.apply(a) - } +given [A: TraceableValue, P]: TraceableValue[A Refined P] = TraceableValue[A].contramap(_.value) - implicit def genericMigration[A, B, ARepr <: HList, BRepr <: HList](implicit - aGen: LabelledGeneric.Aux[A, ARepr], - bGen: LabelledGeneric.Aux[B, BRepr], - inter: hlist.Intersection.Aux[ARepr, BRepr, BRepr] - ): Migration[A, B] = - a => bGen.from(inter.apply(aGen.to(a))) +type SqlIdentifierPredicate = MatchesRegex["[A-Za-z][A-Za-z0-9_]*"] - type SqlIdentifierPredicate = MatchesRegex[W.`"[A-Za-z][A-Za-z0-9_]*"`.T] - type SqlIdentifier = String Refined SqlIdentifierPredicate - type GeneratedPasswordPredicate = MatchesRegex[W.`"""[-A-Za-z0-9!"#$%&()*+,./:<=>?@\\[\\]\\\\^_{|}~]+"""`.T] - type GeneratedPassword = String Refined GeneratedPasswordPredicate +type SqlIdentifier = String Refined SqlIdentifierPredicate +object SqlIdentifier extends RefinedTypeOps[SqlIdentifier, String] - type MasterDatabaseUsername = MasterDatabaseUsername.Type - object MasterDatabaseUsername extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec +type GeneratedPasswordPredicate = MatchesRegex["""[-A-Za-z0-9!"#$%&()*+,./:<=>?@\[\]\\^_{|}~]+"""] 
- type MasterDatabasePassword = MasterDatabasePassword.Type - object MasterDatabasePassword extends NewtypeWrapped[String] with DerivedCirceCodec +type MasterDatabaseUsername = MasterDatabaseUsername.Type +object MasterDatabaseUsername extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec with DerivedTraceableValueFromNewtype - type SecretId = SecretId.Type - object SecretId extends NewtypeWrapped[String] with DerivedCirceCodec +type MasterDatabasePassword = MasterDatabasePassword.Type +object MasterDatabasePassword extends NewtypeWrapped[String] with DerivedCirceCodec { + given TraceableValue[MasterDatabasePassword] = TraceableValue[String].contramap(_ => "redacted password") +} - type Host = Host.Type - object Host extends NewtypeWrapped[String] with DerivedCirceCodec +private[init] given Decoder[Host] = + Decoder[String].emap(s => Host.fromString(s).toRight(s"$s could not be decoded as a Host")) +private[init] given TraceableValue[Host] = TraceableValue[String].contramap(_.show) - type Port = Port.Type - object Port extends NewtypeWrapped[Int] with DerivedCirceCodec +private[init] given Decoder[Port] = + Decoder[Int].emap(i => Port.fromInt(i).toRight(s"$i could not be decoded as a Port")) +private[init] given TraceableValue[Port] = TraceableValue[Int].contramap(_.value) - type Username = Username.Type - object Username extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec +type Username = Username.Type +object Username extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec with DerivedTraceableValueFromNewtype - type Password = Password.Type - object Password extends NewtypeWrapped[GeneratedPassword] with DerivedCirceCodec +type Password = Password.Type +object Password extends NewtypeWrapped[String Refined GeneratedPasswordPredicate] with DerivedCirceCodec { + given TraceableValue[Password] = TraceableValue[String].contramap(_ => "redacted password") +} - type Database = Database.Type - object Database extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec +type Database = Database.Type +object Database extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec with DerivedTraceableValueFromNewtype - type RoleName = RoleName.Type - object RoleName extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec -} +type RoleName = RoleName.Type +object RoleName extends NewtypeWrapped[SqlIdentifier] with DerivedCirceCodec with DerivedTraceableValueFromNewtype -package init { - trait Migration[A, B] { - def apply(a: A): B - } +trait DerivedTraceableValueFromNewtype { + given [T, S](using HasExtractor.Aux[T, S], TraceableValue[S]): TraceableValue[T] = + TraceableValue[S].contramap(summon[HasExtractor.Aux[T, S]].extract) } diff --git a/src/main/scala/com/dwolla/postgres/init/repositories/CreateSkunkSession.scala b/src/main/scala/com/dwolla/postgres/init/repositories/CreateSkunkSession.scala index 20eb71f..ee2300d 100644 --- a/src/main/scala/com/dwolla/postgres/init/repositories/CreateSkunkSession.scala +++ b/src/main/scala/com/dwolla/postgres/init/repositories/CreateSkunkSession.scala @@ -2,15 +2,19 @@ package com.dwolla.postgres.init package repositories import cats.MonadThrow -import cats.data._ -import cats.effect._ +import cats.data.* +import cats.effect.* import cats.effect.std.Console -import cats.syntax.all._ +import cats.syntax.all.* +import com.comcast.ip4s.* import fs2.io.net.Network import natchez.Trace -import skunk._ +import skunk.* import skunk.util.Typer +import scala.concurrent.duration.Duration + +@FunctionalInterface trait CreateSkunkSession[F[_]] 
{ def single(host: String, port: Int = 5432, @@ -23,42 +27,38 @@ trait CreateSkunkSession[F[_]] { parameters: Map[String, String] = Session.DefaultConnectionParameters, commandCache: Int = 1024, queryCache: Int = 1024, + parseCache: Int = 1024, + readTimeout: Duration = Duration.Inf, ): Resource[F, Session[F]] } object CreateSkunkSession { type InSession[F[_], A] = Kleisli[F, Session[F], A] - implicit class InSessionOps[F[_], A](val kleisli: Kleisli[F, Session[F], A]) extends AnyVal { + extension [F[_], A](kleisli: Kleisli[F, Session[F], A]) def inSession(host: Host, port: Port, username: MasterDatabaseUsername, password: MasterDatabasePassword, ) - (implicit - `🦨`: CreateSkunkSession[F], - `[]`: MonadCancelThrow[F]): F[A] = + (using CreateSkunkSession[F], MonadCancelThrow[F]): F[A] = CreateSkunkSession[F].single( - host = host.value, + host = host.show, port = port.value, user = username.value.value, database = "postgres", password = password.value.some, - ssl = SSL.System, + ssl = if (host == host"localhost") SSL.None else SSL.System, ).use(kleisli.run) - } - implicit class IgnoreErrorOps[F[_], A](val fa: F[A]) extends AnyVal { + extension [F[_], A](fa: F[A]) def recoverUndefinedAs(a: A) - (implicit `[]`: MonadThrow[F]): F[A] = - fa.recover { + (using MonadThrow[F]): F[A] = + fa.recover: case SqlState.UndefinedObject(_) => a case SqlState.InvalidCatalogName(_) => a - } - } def apply[F[_] : CreateSkunkSession]: CreateSkunkSession[F] = implicitly - implicit def instance[F[_] : Concurrent : Trace : Network : Console]: CreateSkunkSession[F] = - Session.single _ + given [F[_] : {Temporal, Trace, Network, Console}]: CreateSkunkSession[F] = Session.single } diff --git a/src/main/scala/com/dwolla/postgres/init/repositories/DatabaseRepository.scala b/src/main/scala/com/dwolla/postgres/init/repositories/DatabaseRepository.scala index 590537a..0511a06 100644 --- a/src/main/scala/com/dwolla/postgres/init/repositories/DatabaseRepository.scala +++ b/src/main/scala/com/dwolla/postgres/init/repositories/DatabaseRepository.scala @@ -1,28 +1,29 @@ package com.dwolla.postgres.init package repositories -import cats.data._ -import cats.effect.{Trace => _, _} -import cats.syntax.all._ +import cats.data.* +import cats.effect.{Trace as _, *} +import cats.syntax.all.* import cats.tagless.Derive -import cats.tagless.aop.Instrument -import com.dwolla.postgres.init.repositories.CreateSkunkSession._ -import com.dwolla.tracing._ -import natchez.Trace +import cats.tagless.aop.Aspect +import com.dwolla.postgres.init.repositories.CreateSkunkSession.* +import com.dwolla.tracing.syntax.* +import natchez.{Trace, TraceableValue} import org.typelevel.log4cats.Logger -import skunk._ -import skunk.codec.all._ -import skunk.implicits._ +import skunk.* +import skunk.codec.all.* +import skunk.implicits.* trait DatabaseRepository[F[_]] { def createDatabase(db: DatabaseMetadata): F[Database] def removeDatabase(database: Database): F[Database] } +@annotation.experimental object DatabaseRepository { - implicit val DatabaseRepositoryInstrument: Instrument[DatabaseRepository] = Derive.instrument + given Aspect[DatabaseRepository, TraceableValue, TraceableValue] = Derive.aspect - def apply[F[_] : MonadCancelThrow : Logger : Trace]: DatabaseRepository[InSession[F, *]] = new DatabaseRepository[InSession[F, *]] { + def apply[F[_] : {MonadCancelThrow, Logger, Trace}]: DatabaseRepository[InSession[F, *]] = new DatabaseRepository[InSession[F, *]] { override def createDatabase(db: DatabaseMetadata): Kleisli[F, Session[F], Database] = 
checkDatabaseExists(db) .ifM(createDatabase(db.name, db.username), Logger[F].mapK(Kleisli.liftK[F, Session[F]]).info(s"No-op: database ${db.name} already exists")) @@ -30,8 +31,7 @@ object DatabaseRepository { private def checkDatabaseExists(db: DatabaseMetadata): Kleisli[F, Session[F], Boolean] = Kleisli { _ - .prepare(DatabaseQueries.checkDatabaseExists) - .use(_.unique(db.name)) + .unique(DatabaseQueries.checkDatabaseExists)(db.name) .flatTap { count => Logger[F].info(s"Found $count databases matching ${db.name} on ${db.username}@${db.host}:${db.port}") } @@ -56,7 +56,7 @@ object DatabaseRepository { .as(database) .recoverUndefinedAs(database) } - }.withTracing + }.traceWithInputsAndOutputs } object DatabaseQueries { diff --git a/src/main/scala/com/dwolla/postgres/init/repositories/RoleRepository.scala b/src/main/scala/com/dwolla/postgres/init/repositories/RoleRepository.scala index 89fe8ad..245ee41 100644 --- a/src/main/scala/com/dwolla/postgres/init/repositories/RoleRepository.scala +++ b/src/main/scala/com/dwolla/postgres/init/repositories/RoleRepository.scala @@ -1,20 +1,21 @@ package com.dwolla.postgres.init package repositories -import cats.data._ -import cats.effect.{Trace => _, _} -import cats.syntax.all._ +import cats.data.* +import cats.effect.{Trace as _, *} +import cats.syntax.all.* import cats.tagless.Derive -import cats.tagless.aop.Instrument -import com.dwolla.postgres.init.repositories.CreateSkunkSession._ -import com.dwolla.tracing._ +import cats.tagless.aop.Aspect +import com.dwolla.postgres.init.repositories.CreateSkunkSession.* +import com.dwolla.tracing.syntax.* +import com.dwolla.tracing.LowPriorityTraceableValueInstances.* import eu.timepit.refined.api.Refined -import eu.timepit.refined.auto._ -import natchez.Trace +import eu.timepit.refined.auto.* +import natchez.{Trace, TraceableValue} import org.typelevel.log4cats.Logger -import skunk._ -import skunk.codec.all._ -import skunk.implicits._ +import skunk.* +import skunk.codec.all.* +import skunk.implicits.* trait RoleRepository[F[_]] { def createRole(database: Database): F[Unit] @@ -23,8 +24,9 @@ trait RoleRepository[F[_]] { def removeUserFromRole(username: Username, database: Database): F[Unit] } +@annotation.experimental object RoleRepository { - implicit val RoleRepositoryInstrument: Instrument[RoleRepository] = Derive.instrument + given Aspect[RoleRepository, TraceableValue, TraceableValue] = Derive.aspect def roleNameForDatabase(database: Database): RoleName = RoleName(Refined.unsafeApply(database.value + "_role")) @@ -39,8 +41,7 @@ object RoleRepository { private def checkRoleExists(role: RoleName): Kleisli[F, Session[F], Boolean] = Kleisli { _ - .prepare(RoleQueries.countRoleByName) - .use(_.unique(role)) + .unique(RoleQueries.countRoleByName)(role) .flatTap { count => Logger[F].info(s"found $count roles named $role") } @@ -108,7 +109,7 @@ object RoleRepository { .void .recoverUndefinedAs(()) } - }.withTracing + }.traceWithInputsAndOutputs } object RoleQueries { diff --git a/src/main/scala/com/dwolla/postgres/init/repositories/UserRepository.scala b/src/main/scala/com/dwolla/postgres/init/repositories/UserRepository.scala index 918f1fd..7358239 100644 --- a/src/main/scala/com/dwolla/postgres/init/repositories/UserRepository.scala +++ b/src/main/scala/com/dwolla/postgres/init/repositories/UserRepository.scala @@ -1,36 +1,37 @@ package com.dwolla.postgres.init package repositories -import cats.data._ -import cats.effect.{Trace => _, _} -import cats.syntax.all._ +import cats.data.* +import cats.effect.{Trace 
as _, *} +import cats.syntax.all.* import cats.tagless.Derive -import cats.tagless.aop.Instrument -import com.dwolla.postgres.init.repositories.CreateSkunkSession._ -import com.dwolla.tracing._ +import cats.tagless.aop.Aspect +import com.dwolla.postgres.init.repositories.CreateSkunkSession.* +import com.dwolla.tracing.syntax.* import eu.timepit.refined.api.Refined import eu.timepit.refined.refineV -import natchez.Trace -import org.typelevel.log4cats._ -import skunk._ -import skunk.codec.all._ -import skunk.implicits._ +import natchez.{Trace, TraceableValue} +import org.typelevel.log4cats.* +import skunk.* +import skunk.codec.all.* +import skunk.implicits.* -import scala.concurrent.duration._ +import scala.concurrent.duration.* trait UserRepository[F[_]] { def addOrUpdateUser(userConnectionInfo: UserConnectionInfo): F[Username] def removeUser(username: Username): F[Username] } +@annotation.experimental object UserRepository { - implicit val UserRepositoryInstrument: Instrument[UserRepository] = Derive.instrument + given Aspect[UserRepository, TraceableValue, TraceableValue] = Derive.aspect def usernameForDatabase(database: Database): Username = Username(Refined.unsafeApply(database.value.value)) - def apply[F[_] : Logger : Temporal : Trace]: UserRepository[Kleisli[F, Session[F], *]] = new UserRepository[Kleisli[F, Session[F], *]] { - private implicit def kleisliLogger[A]: Logger[Kleisli[F, A, *]] = Logger[F].mapK(Kleisli.liftK) + def apply[F[_] : {Logger, Temporal, Trace}]: UserRepository[Kleisli[F, Session[F], *]] = new UserRepository[Kleisli[F, Session[F], *]] { + private given [A]: Logger[Kleisli[F, A, *]] = Logger[F].mapK(Kleisli.liftK) override def addOrUpdateUser(userConnectionInfo: UserConnectionInfo): Kleisli[F, Session[F], Username] = for { @@ -41,8 +42,7 @@ object UserRepository { private def determineCommandFor(username: Username, password: Password): Kleisli[F, Session[F], Command[Void]] = Kleisli { _ - .prepare(UserQueries.checkUserExists) - .use(_.option(username)) + .option(UserQueries.checkUserExists)(username) .flatMap { case Some(_) => Logger[F].info(s"Found and updating user named $username").as(UserQueries.updateUser(username, password)) case None => Logger[F].info(s"Creating user $username").as(UserQueries.createUser(username, password)) @@ -82,7 +82,7 @@ object UserRepository { override def removeUser(username: Username): Kleisli[F, Session[F], Username] = removeUser(username, 5) - }.withTracing + }.traceWithInputsAndOutputs } object UserQueries { diff --git a/src/main/scala/com/dwolla/tracing.scala b/src/main/scala/com/dwolla/tracing.scala deleted file mode 100644 index 9325665..0000000 --- a/src/main/scala/com/dwolla/tracing.scala +++ /dev/null @@ -1,21 +0,0 @@ -package com.dwolla - -import cats._ -import cats.effect.{Trace => _} -import cats.tagless.aop._ -import cats.tagless.syntax.all._ -import natchez._ - -object tracing { - class WithTracingSyntax[Alg[_[_]] : Instrument, F[_] : Trace](f: Alg[F]) { - private def toTraceFunctionK: Instrumentation[F, *] ~> F = new (Instrumentation[F, *] ~> F) { - override def apply[A](fa: Instrumentation[F, A]): F[A] = Trace[F].span(s"${fa.algebraName}.${fa.methodName}")(fa.value) - } - - def withTracing: Alg[F] = - Instrument[Alg].instrument(f).mapK(toTraceFunctionK) - } - - implicit def toWithTracingSyntax[Alg[_[_]] : Instrument, F[_] : Trace](f: Alg[F]): WithTracingSyntax[Alg, F] = - new WithTracingSyntax(f) -} diff --git a/src/test/scala/com/dwolla/postgres/init/ExtractRequestPropertiesSpec.scala 
b/src/test/scala/com/dwolla/postgres/init/ExtractRequestPropertiesSpec.scala index 0bd6d9e..c87d492 100644 --- a/src/test/scala/com/dwolla/postgres/init/ExtractRequestPropertiesSpec.scala +++ b/src/test/scala/com/dwolla/postgres/init/ExtractRequestPropertiesSpec.scala @@ -1,8 +1,9 @@ package com.dwolla.postgres.init +import com.amazonaws.secretsmanager.SecretIdType +import com.comcast.ip4s.* import io.circe.Decoder -import io.circe.literal._ -import eu.timepit.refined.auto._ +import io.circe.literal.* class ExtractRequestPropertiesSpec extends munit.FunSuite { @@ -24,12 +25,12 @@ class ExtractRequestPropertiesSpec extends munit.FunSuite { assertEquals( Decoder[DatabaseMetadata].decodeJson(input), Right(DatabaseMetadata( - Host("database-hostname"), - Port(5432), - Database("mydb"), - MasterDatabaseUsername("masterdb"), + host"database-hostname", + port"5432", + Database(SqlIdentifier.unsafeFrom("mydb")), + MasterDatabaseUsername(SqlIdentifier.unsafeFrom("masterdb")), MasterDatabasePassword("master-pass"), - List("secret1", "secret2").map(SecretId(_)), + List("secret1", "secret2").map(SecretIdType(_)), )) ) diff --git a/src/test/scala/com/dwolla/postgres/init/LocalApp.scala b/src/test/scala/com/dwolla/postgres/init/LocalApp.scala new file mode 100644 index 0000000..fd191f1 --- /dev/null +++ b/src/test/scala/com/dwolla/postgres/init/LocalApp.scala @@ -0,0 +1,72 @@ +package com.dwolla.postgres.init + +import cats.effect.* +import cats.syntax.all.* +import com.comcast.ip4s.* +import com.dwolla.buildinfo.postgres.init.BuildInfo +import com.dwolla.tracing.{DwollaEnvironment, OpenTelemetryAtDwolla} +import eu.timepit.refined.auto.* +import feral.lambda.cloudformation.* +import feral.lambda.{Context, Invocation} +import org.http4s.syntax.all.* +import org.typelevel.log4cats.LoggerFactory +import org.typelevel.log4cats.slf4j.Slf4jFactory +import com.amazonaws.secretsmanager.SecretIdType + +import scala.concurrent.duration.* + +/** + * Runs the PostgresqlDatabaseInitHandler locally using a hard-coded input message. + * + * You'll need a Postgres database running at the coordinates specified in the input message, + * and a Secrets Manager secret containing the [[UserConnectionInfo]]. Make sure to set + * up AWS credentials that have access to retrieve the secret value. + * + * View the result at the response URL. There is a webhook.site URL in the example which + * should continue to work. OTel traces will be emitted and sent to the collector configured + * via environment variables. 
+ */ +@annotation.experimental +object LocalApp extends IOApp { + + override def run(args: List[String]): IO[ExitCode] = { + given LoggerFactory[IO] = Slf4jFactory.create[IO] + + OpenTelemetryAtDwolla[IO](BuildInfo.name, BuildInfo.version, DwollaEnvironment.Local) + .flatMap(new PostgresqlDatabaseInitHandler().handler(_)) + .use { handler => + val event = CloudFormationCustomResourceRequest[DatabaseMetadata]( + RequestType = CloudFormationRequestType.CreateRequest, + ResponseURL = uri"https://webhook.site/0df31272-1810-4b2e-aea6-e4838da33846", // view at https://webhook.site/#!/view/0df31272-1810-4b2e-aea6-e4838da33846/dff27799-9148-4c4c-8b2d-8c60bb2850f0/1 + StackId = StackId("my-stack"), + RequestId = RequestId("my-request"), + ResourceType = ResourceType("Custom::PostgresqlDatabaseInitHandler"), // TODO confirm name + LogicalResourceId = LogicalResourceId("my-resource"), + PhysicalResourceId = None, + ResourceProperties = DatabaseMetadata( + host = host"localhost", + port = port"5432", + name = Database(SqlIdentifier.unsafeFrom("transactionactivitymonitor")), + username = MasterDatabaseUsername(SqlIdentifier.unsafeFrom("root")), + password = MasterDatabasePassword(SqlIdentifier.unsafeFrom("root")), + secretIds = List("my-UserConnectionInfo-secret").map(SecretIdType(_)), + ), + OldResourceProperties = None, + ) + + val context = Context(functionName = BuildInfo.name, + functionVersion = BuildInfo.version, + invokedFunctionArn = "arn", + memoryLimitInMB = 1024, + awsRequestId = "request-id", + logGroupName = "log-group-name", + logStreamName = "log-stream-name", + identity = None, + clientContext = None, + remainingTime = 60.minutes.pure[IO]) + + handler.apply(Invocation.pure(event, context)) + } + .as(ExitCode.Success) + } +} diff --git a/src/test/scala/com/dwolla/postgres/init/SqlStringRegexSpec.scala b/src/test/scala/com/dwolla/postgres/init/SqlStringRegexSpec.scala index 3f6a153..65f5c3e 100644 --- a/src/test/scala/com/dwolla/postgres/init/SqlStringRegexSpec.scala +++ b/src/test/scala/com/dwolla/postgres/init/SqlStringRegexSpec.scala @@ -1,11 +1,12 @@ package com.dwolla.postgres.init +import cats.syntax.all.* import eu.timepit.refined.refineV import org.scalacheck.Prop.forAll import org.scalacheck.{Arbitrary, Gen, Shrink} class SqlStringRegexSpec extends munit.ScalaCheckSuite { - implicit val shrinkString: Shrink[String] = Shrink.shrinkAny + given Shrink[String] = Shrink.shrinkAny test("strings containing semicolons don't validate") { assert(refineV[GeneratedPasswordPredicate](";").isLeft) @@ -16,20 +17,20 @@ class SqlStringRegexSpec extends munit.ScalaCheckSuite { } property("sql identifiers match [A-Za-z][A-Za-z0-9_]*") { - implicit val arbString: Arbitrary[String] = Arbitrary { + given Arbitrary[String] = Arbitrary { for { initial <- Gen.alphaChar tail <- Gen.stringOf(Gen.oneOf(Gen.alphaChar, Gen.numChar, Gen.const('_'))) } yield s"$initial$tail" } - forAll { s: String => - assert(refineV[SqlIdentifierPredicate](s).map(_.value) == Right(s)) + forAll { (s: String) => + assertEquals(refineV[SqlIdentifierPredicate](s).map(_.value), s.asRight) } } property("passwords contain the allowed characters") { - implicit val arbString: Arbitrary[String] = Arbitrary { + given Arbitrary[String] = Arbitrary { val allowedPunctuation: Gen[Char] = Gen.oneOf("""! " # $ % & ( ) * + , - . / : < = > ? 
@ [ \ ] ^ _ { | } ~ """.replaceAll(" ", "").toList) val allowedCharacters: Gen[Char] = Gen.oneOf(Gen.alphaChar, Gen.numChar, allowedPunctuation) @@ -39,8 +40,8 @@ class SqlStringRegexSpec extends munit.ScalaCheckSuite { } yield s"$initial$tail" } - forAll { s: String => - assert(refineV[GeneratedPasswordPredicate](s).map(_.value) == Right(s)) + forAll { (s: String) => + assertEquals(refineV[GeneratedPasswordPredicate](s).map(_.value), s.asRight) } } }
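A quick way to exercise the new accumulating Decoder[DatabaseMetadata] introduced in ExtractRequestProperties.scala, outside the munit suite: the sketch below is illustrative only and not part of this change — the @main entry point name and the deliberately invalid sample values are made up, and it assumes the PR's givens and circe-literal are on the classpath (for example via sbt Test/console).

import cats.data.Validated
import com.dwolla.postgres.init.DatabaseMetadata
import io.circe.Decoder
import io.circe.literal.*

@main def checkAccumulatingDecoder(): Unit =
  // "Port" and "DatabaseName" are both invalid here; because the decoder now accumulates,
  // both failures should be reported together instead of stopping at the first bad field.
  val badInput = json"""{
    "Host": "database-hostname",
    "Port": "not-a-port",
    "DatabaseName": "1-not-a-sql-identifier",
    "MasterDatabaseUsername": "masterdb",
    "MasterDatabasePassword": "master-pass",
    "UserConnectionSecrets": ["secret1", "secret2"]
  }"""

  Decoder[DatabaseMetadata].decodeAccumulating(badInput.hcursor) match
    case Validated.Invalid(errors) => errors.toList.foreach(e => println(e.getMessage))
    case Validated.Valid(metadata) => println(metadata)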