Commit

Merge d7a507b into c4864bb
oguzhanunlu committed May 25, 2023
2 parents c4864bb + d7a507b commit 80050d8
Showing 36 changed files with 1,196 additions and 353 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG
@@ -1,3 +1,10 @@
Version 3.3.0 (2023-05-25)
--------------------------
Take superseding schema into account during validation (#231)
Apps should explicitly import java.net.http.HttpClient instances of RegistryLookup (#241)
Resolver cache suffers from races and HTTP server overload during cold start (#227)
Ignore `$ref` keyword referencing HTTP resources (#238)

Version 2.2.1 (2023-01-24)
--------------------------
Update links in Readme (#205)
3 changes: 1 addition & 2 deletions README.md
@@ -65,8 +65,7 @@ import cats.syntax.show._

import com.snowplowanalytics.iglu.client.Client
import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, SelfDescribingData}

implicit val clockIoInstance: Clock[IO] = Clock.create[IO] // Usually provided by IOApp
import com.snowplowanalytics.iglu.client.resolver.registries.JavaNetRegistryLookup._

val resolverConfig: Json = json"""{
"schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-0-1",
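For context, a minimal sketch of the setup after this change, assuming cats-effect 3 (where Clock[IO] is supplied by the IO runtime, so the hand-rolled implicit is no longer needed) and circe-literal for the json interpolator; the repository entry below is illustrative rather than copied from the README.

import cats.effect.IO
import io.circe.Json
import io.circe.literal._

import com.snowplowanalytics.iglu.client.Client
import com.snowplowanalytics.iglu.client.resolver.registries.JavaNetRegistryLookup._

// Resolver configuration conforming to the resolver-config 1-0-1 schema;
// the single Iglu Central repository here is a placeholder.
val resolverConfig: Json = json"""{
  "schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-0-1",
  "data": {
    "cacheSize": 500,
    "repositories": [
      {
        "name": "Iglu Central",
        "priority": 0,
        "vendorPrefixes": ["com.snowplowanalytics"],
        "connection": { "http": { "uri": "http://iglucentral.com" } }
      }
    ]
  }
}"""

// parseDefault resolves CreateResolverCache[IO] from implicit scope;
// DecodingFailure is a Throwable, so fromEither lifts the parse error into IO.
val client: IO[Client[IO, Json]] =
  Client.parseDefault[IO](resolverConfig).value.flatMap(IO.fromEither)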
Client.scala
@@ -17,7 +17,7 @@ import cats.data.EitherT
import cats.effect.{Clock, IO}
import io.circe.{DecodingFailure, Json}
import com.snowplowanalytics.iglu.core.SelfDescribingData
import resolver.{InitListCache, InitSchemaCache}
import resolver.CreateResolverCache
import resolver.registries.{Registry, RegistryLookup}

/**
@@ -39,9 +39,9 @@ final case class Client[F[_], A](resolver: Resolver[F], validator: Validator[A])
for {
schema <- EitherT(resolver.lookupSchema(instance.schema))
schemaValidation = validator.validateSchema(schema)
_ <- EitherT.fromEither[F](schemaValidation).leftMap(_.toClientError)
_ <- EitherT.fromEither[F](schemaValidation).leftMap(_.toClientError(None))
validation = validator.validate(instance.data, schema)
_ <- EitherT.fromEither[F](validation).leftMap(_.toClientError)
_ <- EitherT.fromEither[F](validation).leftMap(_.toClientError(None))
} yield ()
}

@@ -51,7 +51,7 @@ object Client {
val IgluCentral: Client[IO, Json] =
Client[IO, Json](Resolver(List(Registry.IgluCentral), None), CirceValidator)

def parseDefault[F[_]: Monad: InitSchemaCache: InitListCache](
def parseDefault[F[_]: Monad: CreateResolverCache](
json: Json
): EitherT[F, DecodingFailure, Client[F, Json]] =
EitherT(Resolver.parse[F](json)).map { resolver =>
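A hedged sketch of exercising the updated Client API: check still yields Unit on success, and the None passed to toClientError carries the absent superseding-schema info (mirroring the Option used in IgluCirceClient below). The schema key and payload here are made-up placeholders, not part of this diff.

import cats.effect.IO
import io.circe.Json
import io.circe.literal._
import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, SelfDescribingData}
import com.snowplowanalytics.iglu.client.{Client, ClientError}
import com.snowplowanalytics.iglu.client.resolver.registries.JavaNetRegistryLookup._

// An illustrative self-describing instance to validate.
val instance: SelfDescribingData[Json] =
  SelfDescribingData(
    SchemaKey("com.acme", "example_event", "jsonschema", SchemaVer.Full(1, 0, 0)),
    json"""{"field": "value"}"""
  )

// check needs Monad, RegistryLookup and Clock for IO, all in implicit scope here.
def validateInstance(client: Client[IO, Json]): IO[Either[ClientError, Unit]] =
  client.check(instance).value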
IgluCirceClient.scala
@@ -17,7 +17,8 @@ import cats.data.EitherT
import cats.effect.Clock
import cats.implicits._
import com.snowplowanalytics.iglu.client.resolver.registries.RegistryLookup
import com.snowplowanalytics.iglu.client.resolver.{InitListCache, InitSchemaCache}
import com.snowplowanalytics.iglu.client.resolver.CreateResolverCache
import com.snowplowanalytics.iglu.client.resolver.Resolver.SupersededBy
import com.snowplowanalytics.iglu.client.validator.CirceValidator.WithCaching.{
InitValidatorCache,
SchemaEvaluationCache,
@@ -43,18 +44,24 @@ final class IgluCirceClient[F[_]] private (
M: Monad[F],
L: RegistryLookup[F],
C: Clock[F]
): EitherT[F, ClientError, Unit] =
): EitherT[F, ClientError, SupersededBy] =
for {
resolverResult <- EitherT(resolver.lookupSchemaResult(instance.schema))
resolverResult <- EitherT(
resolver.lookupSchemaResult(instance.schema, resolveSupersedingSchema = true)
)
validation =
CirceValidator.WithCaching.validate(schemaEvaluationCache)(instance.data, resolverResult)
_ <- EitherT(validation).leftMap(_.toClientError)
} yield ()
_ <- EitherT(validation).leftMap(e =>
e.toClientError(resolverResult.value.supersededBy.map(_.asString))
)
// Returning the superseding schema info as well, since we want to inform the caller that the SDJ
// was validated against the superseding schema whenever the original schema is superseded.
} yield resolverResult.value.supersededBy
}

object IgluCirceClient {

def parseDefault[F[_]: Monad: InitSchemaCache: InitListCache: InitValidatorCache](
def parseDefault[F[_]: Monad: CreateResolverCache: InitValidatorCache](
json: Json
): EitherT[F, DecodingFailure, IgluCirceClient[F]] =
for {
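Since check now surfaces Resolver.SupersededBy, a caller can tell whether the instance was in fact validated against a superseding schema. A hedged sketch, assuming SupersededBy wraps an optional schema version exposing asString (as the .map(_.asString) call above suggests); the client and instance values are placeholders.

import cats.effect.IO
import io.circe.Json
import com.snowplowanalytics.iglu.core.SelfDescribingData
import com.snowplowanalytics.iglu.client.IgluCirceClient
import com.snowplowanalytics.iglu.client.resolver.registries.JavaNetRegistryLookup._

// Reports whether validation ran against the original or a superseding schema.
def reportValidation(client: IgluCirceClient[IO], instance: SelfDescribingData[Json]): IO[Unit] =
  client.check(instance).value.flatMap {
    case Right(Some(superseding)) =>
      IO.println(s"Valid, checked against superseding schema ${superseding.asString}")
    case Right(None) =>
      IO.println("Valid against the schema referenced by the instance")
    case Left(error) =>
      IO.println(s"Invalid: $error")
  }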
CreateResolverCache.scala
@@ -0,0 +1,69 @@
/*
* Copyright (c) 2018-2023 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.client.resolver

import cats.Id
import cats.effect.Async
import cats.effect.std.Mutex
import cats.implicits._
import com.snowplowanalytics.lrumap.{CreateLruMap, LruMap}
import com.snowplowanalytics.iglu.core.SchemaKey

trait CreateResolverCache[F[_]] {

def createSchemaCache(size: Int): F[LruMap[F, SchemaKey, SchemaCacheEntry]]

def createSchemaListCache(size: Int): F[LruMap[F, ListCacheKey, ListCacheEntry]]

def createMutex[K]: F[ResolverMutex[F, K]]

}

object CreateResolverCache {

def apply[F[_]](implicit instance: CreateResolverCache[F]): CreateResolverCache[F] = instance

private trait SimpleCreateResolverCache[F[_]] extends CreateResolverCache[F] {

def createLruMap[K, V](size: Int): F[LruMap[F, K, V]]

override def createSchemaCache(size: Int): F[LruMap[F, SchemaKey, SchemaCacheEntry]] =
createLruMap(size)

override def createSchemaListCache(size: Int): F[LruMap[F, ListCacheKey, ListCacheEntry]] =
createLruMap(size)

}

implicit def idCreateResolverCache: CreateResolverCache[Id] =
new SimpleCreateResolverCache[Id] {
def createLruMap[K, V](size: Int): LruMap[Id, K, V] =
CreateLruMap[Id, K, V].create(size)

def createMutex[K]: ResolverMutex[Id, K] =
ResolverMutex.idResolverMutex(new Object, createLruMap[K, Object](1000))
}

implicit def asyncCreateResolverCache[F[_]: Async]: CreateResolverCache[F] =
new SimpleCreateResolverCache[F] {

def createLruMap[K, V](size: Int): F[LruMap[F, K, V]] =
CreateLruMap[F, K, V].create(size)

def createMutex[K]: F[ResolverMutex[F, K]] =
for {
mutex <- Mutex[F]
lrumap <- createLruMap[K, Mutex[F]](1000)
} yield ResolverMutex.asyncResolverMutex(mutex, lrumap)
}
}
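A minimal sketch of summoning the new capability directly; the Async[IO] instance from cats-effect 3 selects asyncCreateResolverCache, and the cache sizes are arbitrary. In practice the Resolver is expected to build these internally, so this only illustrates the API surface.

import cats.effect.IO
import com.snowplowanalytics.iglu.client.resolver.CreateResolverCache

object CreateResolverCacheExample {
  // Builds the two LRU caches and a keyed mutex that a Resolver would use.
  val build: IO[Unit] =
    for {
      schemaCache <- CreateResolverCache[IO].createSchemaCache(500)
      listCache   <- CreateResolverCache[IO].createSchemaListCache(500)
      mutex       <- CreateResolverCache[IO].createMutex[String]
    } yield ()
}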
