diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 0f790413..7dd99109 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -23,8 +23,8 @@ permissions:
jobs:
test:
- runs-on: self-hosted
-# runs-on: ubuntu-latest
+# runs-on: self-hosted
+ runs-on: ubuntu-latest
env:
# define Java options for both official sbt and sbt-extras
JAVA_OPTS: -Xms2048M -Xmx2048M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
@@ -38,12 +38,14 @@ jobs:
java-version: '8'
distribution: 'temurin'
# cache: 'sbt'
+ - name: Setup sbt launcher
+ uses: sbt/setup-sbt@v1
- name: Run tests & Coverage Report
run: sbt coverage test coverageReport
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
- files: common/target/scala-2.12/coverage-report/cobertura.xml,core/target/scala-2.12/coverage-report/cobertura.xml,teskit/target/scala-2.12/coverage-report/cobertura.xml
+ files: common/target/scala-2.12/coverage-report/cobertura.xml,common/testkit/target/scala-2.12/coverage-report/cobertura.xml,core/target/scala-2.12/coverage-report/cobertura.xml,core/testkit/target/scala-2.12/coverage-report/cobertura.xml,jdbc/target/scala-2.12/coverage-report/cobertura.xml,jdbc/testkit/target/scala-2.12/coverage-report/cobertura.xml,counter/target/scala-2.12/coverage-report/cobertura.xml,kv/target/scala-2.12/coverage-report/cobertura.xml,session/testkit/target/scala-2.12/coverage-report/cobertura.xml
flags: unittests
fail_ci_if_error: true
verbose: true
@@ -59,5 +61,7 @@ jobs:
java-version: '8'
distribution: 'temurin'
# cache: 'sbt'
+ - name: Setup sbt launcher
+ uses: sbt/setup-sbt@v1
- name: Formatting
- run: sbt scalafmtSbtCheck scalafmtCheck test:scalafmtCheck
\ No newline at end of file
+ run: sbt scalafmtSbtCheck scalafmtCheck Test/scalafmtCheck
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 10f850ab..2903fd6a 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -20,8 +20,8 @@ permissions:
jobs:
release:
- runs-on: self-hosted
-# runs-on: ubuntu-latest
+# runs-on: self-hosted
+ runs-on: ubuntu-latest
env:
# define Java options for both official sbt and sbt-extras
JAVA_OPTS: -Xms2048M -Xmx2048M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
@@ -35,17 +35,19 @@ jobs:
java-version: '8'
distribution: 'temurin'
# cache: 'sbt'
+ - name: Setup sbt launcher
+ uses: sbt/setup-sbt@v1
- name: Run tests & Coverage Report
run: sbt coverage test coverageReport coverageAggregate
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
- files: common/target/scala-2.12/coverage-report/cobertura.xml,common/testkit/target/scala-2.12/coverage-report/cobertura.xml,core/target/scala-2.12/coverage-report/cobertura.xml,core/teskit/target/scala-2.12/coverage-report/cobertura.xml,jdbc/target/scala-2.12/coverage-report/cobertura.xml,jdbc/teskit/target/scala-2.12/coverage-report/cobertura.xml,elastic/target/scala-2.12/coverage-report/cobertura.xml,elastic/teskit/target/scala-2.12/coverage-report/cobertura.xml,counter/target/scala-2.12/coverage-report/cobertura.xml,server/testkit/target/scala-2.12/coverage-report/cobertura.xml,session/testkit/target/scala-2.12/coverage-report/cobertura.xml
+ files: common/target/scala-2.12/coverage-report/cobertura.xml,common/testkit/target/scala-2.12/coverage-report/cobertura.xml,core/target/scala-2.12/coverage-report/cobertura.xml,core/testkit/target/scala-2.12/coverage-report/cobertura.xml,jdbc/target/scala-2.12/coverage-report/cobertura.xml,jdbc/testkit/target/scala-2.12/coverage-report/cobertura.xml,counter/target/scala-2.12/coverage-report/cobertura.xml,kv/target/scala-2.12/coverage-report/cobertura.xml,session/testkit/target/scala-2.12/coverage-report/cobertura.xml
flags: unittests
fail_ci_if_error: false
verbose: true
- name: Publish
- run: sbt publish
+ run: sbt +publish
lint:
runs-on: ubuntu-latest
@@ -58,5 +60,7 @@ jobs:
java-version: '8'
distribution: 'temurin'
# cache: 'sbt'
+ - name: Setup sbt launcher
+ uses: sbt/setup-sbt@v1
- name: Formatting
- run: sbt scalafmtSbtCheck scalafmtCheck test:scalafmtCheck
\ No newline at end of file
+ run: sbt scalafmtSbtCheck scalafmtCheck Test/scalafmtCheck
diff --git a/build.sbt b/build.sbt
index e5744267..630ff75c 100644
--- a/build.sbt
+++ b/build.sbt
@@ -4,17 +4,37 @@ import app.softnetwork.*
// Defaults
/////////////////////////////////
+lazy val scala212 = "2.12.20"
+lazy val scala213 = "2.13.16"
+lazy val javacCompilerVersion = "1.8"
+lazy val scalacCompilerOptions = Seq(
+ "-deprecation",
+ "-feature",
+ s"-target:jvm-$javacCompilerVersion"
+)
+
ThisBuild / organization := "app.softnetwork"
name := "generic-persistence-api"
-ThisBuild / version := "0.7.3"
+ThisBuild / version := "0.8-SNAPSHOT"
+
+lazy val moduleSettings = Seq(
+ crossScalaVersions := Seq(scala212, scala213),
+ scalacOptions ++= {
+ CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, 12)) => scalacCompilerOptions :+ "-Ypartial-unification"
+ case Some((2, 13)) => scalacCompilerOptions
+ case _ => Seq.empty
+ }
+ }
+)
-ThisBuild / scalaVersion := "2.12.18"
+ThisBuild / javacOptions ++= Seq("-source", javacCompilerVersion, "-target", javacCompilerVersion)
-ThisBuild / scalacOptions ++= Seq("-deprecation", "-feature", "-target:jvm-1.8", "-Ypartial-unification")
+ThisBuild / scalaVersion := scala212
-ThisBuild / javacOptions ++= Seq("-source", "1.8", "-target", "1.8")
+//ThisBuild / versionScheme := Some("early-semver")
ThisBuild / resolvers ++= Seq(
"Softnetwork Server" at "https://softnetwork.jfrog.io/artifactory/releases/",
@@ -24,20 +44,32 @@ ThisBuild / resolvers ++= Seq(
ThisBuild / libraryDependencies ++= Seq(
"com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf",
- "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.1"
+ "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2"
)
ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always
+ThisBuild / dependencyOverrides ++= Seq(
+ "com.github.jnr" % "jnr-ffi" % "2.2.17",
+ "com.github.jnr" % "jffi" % "1.3.13" classifier "native",
+ "org.lmdbjava" % "lmdbjava" % "0.9.1" exclude("org.slf4j", "slf4j-api"),
+)
+
Test / parallelExecution := false
lazy val common = project.in(file("common"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
lazy val commonTestkit = project.in(file("common/testkit"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
common % "compile->compile;test->test;it->it"
)
@@ -45,14 +77,21 @@ lazy val commonTestkit = project.in(file("common/testkit"))
lazy val core = project.in(file("core"))
.configs(IntegrationTest)
.enablePlugins(BuildInfoPlugin)
- .settings(Defaults.itSettings, app.softnetwork.Info.infoSettings)
+ .settings(
+ Defaults.itSettings,
+ app.softnetwork.Info.infoSettings,
+ moduleSettings
+ )
.dependsOn(
common % "compile->compile;test->test;it->it"
)
lazy val coreTestkit = project.in(file("core/testkit"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
core % "compile->compile;test->test;it->it"
)
@@ -62,7 +101,10 @@ lazy val coreTestkit = project.in(file("core/testkit"))
lazy val server = project.in(file("server"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.enablePlugins(AkkaGrpcPlugin)
.dependsOn(
core % "compile->compile;test->test;it->it"
@@ -70,7 +112,10 @@ lazy val server = project.in(file("server"))
lazy val serverTestkit = project.in(file("server/testkit"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
server % "compile->compile;test->test;it->it"
)
@@ -80,7 +125,10 @@ lazy val serverTestkit = project.in(file("server/testkit"))
lazy val sessionCommon = project.in(file("session/common"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.enablePlugins(AkkaGrpcPlugin)
.dependsOn(
server % "compile->compile;test->test;it->it"
@@ -88,14 +136,20 @@ lazy val sessionCommon = project.in(file("session/common"))
lazy val sessionCore = project.in(file("session/core"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
sessionCommon % "compile->compile;test->test;it->it"
)
lazy val sessionTestkit = project.in(file("session/testkit"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
sessionCore % "compile->compile;test->test;it->it"
)
@@ -105,14 +159,20 @@ lazy val sessionTestkit = project.in(file("session/testkit"))
lazy val jdbc = project.in(file("jdbc"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
core % "compile->compile;test->test;it->it"
)
lazy val jdbcTestkit = project.in(file("jdbc/testkit"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
jdbc % "compile->compile;test->test;it->it"
)
@@ -122,47 +182,33 @@ lazy val jdbcTestkit = project.in(file("jdbc/testkit"))
lazy val cassandra = project.in(file("cassandra"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.dependsOn(
core % "compile->compile;test->test;it->it"
)
lazy val counter = project.in(file("counter"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
- .dependsOn(
- core % "compile->compile;test->test;it->it"
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
)
- .dependsOn(
- coreTestkit % "test->test;it->it"
- )
-
-lazy val elastic = project.in(file("elastic"))
- .configs(IntegrationTest)
- .settings(Defaults.itSettings)
.dependsOn(
core % "compile->compile;test->test;it->it"
)
-
-lazy val elasticTestkit = project.in(file("elastic/testkit"))
- .configs(IntegrationTest)
- .settings(Defaults.itSettings)
- .dependsOn(
- elastic % "compile->compile;test->test;it->it"
- )
.dependsOn(
- commonTestkit % "compile->compile;test->test;it->it"
- )
- .dependsOn(
- coreTestkit % "compile->compile;test->test;it->it"
- )
- .dependsOn(
- jdbcTestkit % "compile->compile;test->test;it->it"
+ coreTestkit % "test->test;it->it"
)
lazy val kv = project.in(file("kv"))
.configs(IntegrationTest)
- .settings(Defaults.itSettings)
+ .settings(
+ Defaults.itSettings,
+ moduleSettings
+ )
.enablePlugins(AkkaGrpcPlugin)
.dependsOn(
core % "compile->compile;test->test;it->it"
@@ -181,8 +227,6 @@ lazy val root = project.in(file("."))
jdbcTestkit,
// cassandra,
counter,
- elastic,
- elasticTestkit,
kv,
server,
serverTestkit,
@@ -191,7 +235,11 @@ lazy val root = project.in(file("."))
sessionTestkit
)
.configs(IntegrationTest)
- .settings(Defaults.itSettings, Publish.noPublishSettings)
+ .settings(
+ Defaults.itSettings,
+ Publish.noPublishSettings,
+ crossScalaVersions := Nil
+ )
Test / envVars := Map(
"POSTGRES_USER" -> "admin",
diff --git a/common/build.sbt b/common/build.sbt
index 24886f0d..75679b45 100644
--- a/common/build.sbt
+++ b/common/build.sbt
@@ -10,14 +10,22 @@ val configDependencies = Seq(
)
val jackson = Seq(
- "com.fasterxml.jackson.core" % "jackson-databind" % Versions.jackson,
- "com.fasterxml.jackson.core" % "jackson-core" % Versions.jackson,
- "com.fasterxml.jackson.core" % "jackson-annotations" % Versions.jackson,
- "com.fasterxml.jackson.module" % "jackson-module-scala_2.12" % Versions.jackson
+ "com.fasterxml.jackson.core" % "jackson-databind" % Versions.jackson,
+ "com.fasterxml.jackson.core" % "jackson-core" % Versions.jackson,
+ "com.fasterxml.jackson.core" % "jackson-annotations" % Versions.jackson,
+ "com.fasterxml.jackson.dataformat" % "jackson-dataformat-cbor" % Versions.jackson,
+ "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % Versions.jackson,
+ "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % Versions.jackson,
+ "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % Versions.jackson,
+ "com.fasterxml.jackson.module" % "jackson-module-parameter-names" % Versions.jackson,
+ "com.fasterxml.jackson.module" %% "jackson-module-scala" % Versions.jackson,
)
val jacksonExclusions = Seq(
ExclusionRule(organization = "com.fasterxml.jackson.core"),
+ ExclusionRule(organization = "com.fasterxml.jackson.dataformat"),
+ ExclusionRule(organization = "com.fasterxml.jackson.datatype"),
+ ExclusionRule(organization = "com.fasterxml.jackson.module"),
ExclusionRule(organization = "org.codehaus.jackson")
)
diff --git a/common/src/main/scala/mustache/package.scala b/common/src/main/scala-2.12/mustache/package.scala
similarity index 100%
rename from common/src/main/scala/mustache/package.scala
rename to common/src/main/scala-2.12/mustache/package.scala
diff --git a/common/src/main/scala-2.13/mustache/package.scala b/common/src/main/scala-2.13/mustache/package.scala
new file mode 100644
index 00000000..b5ab62e6
--- /dev/null
+++ b/common/src/main/scala-2.13/mustache/package.scala
@@ -0,0 +1,666 @@
+package mustache
+
+import scala.annotation.tailrec
+import scala.io.Source
+import scala.concurrent.{Await, Awaitable}
+import scala.concurrent.duration._
+
+/** @author
+ * vspy - https://github.com/vspy/scala-mustache
+ */
+import java.io.{File => JFile}
+
+ import app.softnetwork.config.Settings
+ import org.apache.commons.text.StringEscapeUtils
+
+ case class MustacheParseException(line: Int, msg: String)
+ extends Exception("Line " + line + ": " + msg)
+
+ /** view helper trait
+ */
+ trait MustacheHelperSupport {
+ private val contextLocal = new java.lang.ThreadLocal[Any]()
+ private val renderLocal = new java.lang.ThreadLocal[Function1[String, String]]()
+
+ protected def context: Any = contextLocal.get
+ protected def render(template: String): Any =
+ renderLocal.get()(template)
+
+ def withContextAndRenderFn[A](context: Any, render: String => String)(fn: => A): A = {
+ contextLocal.set(context)
+ renderLocal.set(render)
+ try { fn }
+ finally {
+ contextLocal.set(null)
+ renderLocal.set(null)
+ }
+ }
+ }
+
+ /** template
+ */
+ class Mustache(
+ root: Token
+ ) extends MustacheHelperSupport {
+
+ def this(source: Source, open: String = "{{", close: String = "}}") =
+ this(new Parser {
+ val src: Source = source
+ var otag: String = open
+ var ctag: String = close
+ }.parse())
+
+ def this(str: String) = this(Source.fromString(str))
+
+ def this(
+ str: String,
+ open: String,
+ close: String
+ ) = this(Source.fromString(str), open, close)
+
+ private val compiledTemplate = root
+
+ val globals: Map[String, Any] = {
+ val excludedGlobals = List("wait", "toString", "hashCode", "getClass", "notify", "notifyAll")
+ Map(
+ this.getClass.getMethods
+ .filter(x => {
+ val name = x.getName
+ val pt = x.getParameterTypes
+ (!name.startsWith("render$default")) && (
+ !name.startsWith("product$default")
+ ) && (
+ !name.startsWith("init$default")
+ ) && (
+ !excludedGlobals.contains(name)
+ ) && (pt.isEmpty || (
+ pt.length == 1
+ && pt(0) == classOf[String]
+ ))
+ })
+ .toIndexedSeq.map(x => {
+ x.getName ->
+ (if (x.getParameterTypes.isEmpty) () => {
+ x.invoke(this)
+ }
+ else
+ (str: String) => {
+ x.invoke(this, str)
+ })
+ }): _*
+ )
+ }
+
+ def renderHtml4(
+ context: Any = null,
+ partials: Map[String, Mustache] = Map(),
+ callstack: List[Any] = List(this)
+ ): String = {
+ context match {
+ case m: Map[String, Any] =>
+ product(
+ m.map(kv =>
+ kv._2 match {
+ case s: String => kv._1 -> StringEscapeUtils.escapeHtml4(s)
+ case v => kv._1 -> v
+ }
+ ),
+ partials,
+ callstack
+ ).toString
+ case _ => product(context, partials, callstack).toString
+ }
+ }
+
+ def render(
+ context: Any = null,
+ partials: Map[String, Mustache] = Map(),
+ callstack: List[Any] = List(this)
+ ): String = product(context, partials, callstack).toString
+
+ def product(
+ context: Any = null,
+ partials: Map[String, Mustache] = Map(),
+ callstack: List[Any] = List(this)
+ ): TokenProduct = compiledTemplate.render(context, partials, callstack)
+
+ }
+
+ private class ParserState
+ private object Text extends ParserState
+ private object OTag extends ParserState
+ private object Tag extends ParserState
+ private object CTag extends ParserState
+
+ private abstract class Parser {
+ val src: Source
+
+ var state: ParserState = Text
+ var otag: String
+ var ctag: String
+ var tagPosition: Int = 0
+ var line: Int = 1
+ var prev: Char = '\uffff'
+ var cur: Char = '\uffff'
+ var curlyBraceTag: Boolean = false
+ var stack: List[Token] = List()
+
+ val buf = new StringBuilder(8192)
+
+ def parse(): Token = {
+ while (consume) {
+ state match {
+ case Text =>
+ if (cur == otag.charAt(0))
+ if (otag.length > 1) { tagPosition = 1; state = OTag }
+ else { staticText(); state = Tag }
+ else buf.append(cur)
+
+ case OTag =>
+ if (cur == otag.charAt(tagPosition))
+ if (tagPosition == otag.length - 1) { staticText(); state = Tag }
+ else { tagPosition = tagPosition + 1 }
+ else { notOTag(); buf.append(cur) }
+
+ case Tag =>
+ if (buf.isEmpty && cur == '{') {
+ curlyBraceTag = true
+ buf.append(cur)
+ } else if (curlyBraceTag && cur == '}') {
+ curlyBraceTag = false
+ buf.append(cur)
+ } else if (cur == ctag.charAt(0)) {
+ if (ctag.length > 1) { tagPosition = 1; state = CTag }
+ else tag()
+ } else buf.append(cur)
+
+ case CTag =>
+ if (cur == ctag.charAt(tagPosition)) {
+ if (tagPosition == ctag.length - 1) tag()
+ else { tagPosition = tagPosition + 1 }
+ } else { notCTag(); buf.append(cur) }
+ }
+ }
+ state match {
+ case Text => staticText()
+ case OTag => notOTag(); staticText()
+ case Tag => fail("Unclosed tag \"" + buf.toString + "\"")
+ case CTag => notCTag(); staticText()
+ }
+ stack.foreach {
+ case IncompleteSection(key, _, _, _) => fail("Unclosed mustache section \"" + key + "\"")
+ case _ =>
+ }
+ val result = stack.reverse
+
+ if (result.size == 1) result.head
+ else RootToken(result)
+ }
+
+ private def fail[A](msg: String): A = throw MustacheParseException(line, msg)
+
+ private def consume = {
+ prev = cur
+
+ if (src.hasNext) {
+ cur = src.next()
+ // \n, \r\n, \r
+ if (
+ cur == '\r' ||
+ (cur == '\n' && prev != '\r')
+ ) line = line + 1
+ true
+ } else false
+ }
+
+ private def notOTag(): Unit = {
+ buf.append(otag.substring(0, tagPosition))
+ state = Text
+ }
+ private def notCTag(): Unit = {
+ buf.append(ctag.substring(0, tagPosition))
+ state = Tag
+ }
+ private def reduce: String = { val r = buf.toString; buf.clear(); r }
+
+ private def staticText(): Unit = {
+ val r = reduce
+ if (r.nonEmpty) stack = StaticTextToken(r) :: stack
+ }
+
+ private def checkContent(content: String): String = {
+ val trimmed = content.trim
+ if (trimmed.isEmpty) fail("Empty tag")
+ else trimmed
+ }
+
+ private def tag(): Unit = {
+ state = Text
+ val content = checkContent(reduce)
+ def skipFirst = checkContent(content substring 1)
+ def skipBoth = checkContent(content substring (1, content.length - 1))
+
+ content.charAt(0) match {
+ case '!' => // ignore comments
+ case '&' =>
+ stack = UnescapedToken(skipFirst, otag, ctag) :: stack
+ case '{' =>
+ if (content endsWith "}")
+ stack = UnescapedToken(skipBoth, otag, ctag) :: stack
+ else fail("Unbalanced \"{\" in tag \"" + content + "\"")
+ case '^' =>
+ stack = IncompleteSection(skipFirst, inverted = true, otag = otag, ctag = ctag) :: stack
+ case '#' =>
+ stack = IncompleteSection(skipFirst, inverted = false, otag, ctag) :: stack
+ case '/' =>
+ val name = skipFirst
+
+ @tailrec
+ def addSection(
+ children: List[Token],
+ s: List[Token]
+ ): List[Token] = s.headOption match {
+ case None => fail("Closing unopened section \"" + name + "\"")
+
+ case Some(IncompleteSection(key, inverted, startOTag, startCTag)) if key == name =>
+ SectionToken(inverted, name, children, startOTag, startCTag, otag, ctag) :: s.tail
+
+ case Some(IncompleteSection(key, _, _, _)) if key != name =>
+ fail("Unclosed section \"" + key + "\"")
+
+ case Some(other) =>
+ addSection(other :: children, s.tail)
+ }
+ stack = addSection(List[Token](), stack)
+ case '>' | '<' =>
+ stack = PartialToken(skipFirst, otag, ctag) :: stack
+ case '=' =>
+ if (content.length > 2 && content.endsWith("=")) {
+ val changeDelimiter = skipBoth
+ changeDelimiter.split("""\s+""", -1).toSeq match {
+ case Seq(o, c) =>
+ stack = ChangeDelimitersToken(o, c, otag, ctag) :: stack
+ otag = o
+ ctag = c
+ case _ => fail("Invalid change delimiter tag content: \"" + changeDelimiter + "\"")
+ }
+ } else
+ fail("Invalid change delimiter tag content: \"" + content + "\"")
+ case _ => stack = EscapedToken(content, otag, ctag) :: stack
+ }
+ }
+ }
+
+ // mustache tokens ------------------------------------------
+ trait TokenProduct {
+ val maxLength: Int
+ def write(out: StringBuilder): Unit
+
+ override def toString: String = {
+ val b = new StringBuilder(maxLength)
+ write(b)
+ b.toString
+ }
+ }
+
+ object EmptyProduct extends TokenProduct {
+ val maxLength = 0
+ def write(out: StringBuilder): Unit = {}
+ }
+
+ case class StringProduct(str: String) extends TokenProduct {
+ val maxLength: Int = str.length
+ def write(out: StringBuilder): Unit = out.append(str)
+ }
+
+ trait Token {
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct
+ def templateSource: String
+ }
+
+ trait CompositeToken {
+ def composite(
+ tokens: List[Token],
+ context: Any,
+ partials: Map[String, Mustache],
+ callstack: List[Any]
+ ): TokenProduct =
+ composite(tokens.map { (_, context) }, partials, callstack)
+
+ def composite(
+ tasks: Seq[(Token, Any)],
+ partials: Map[String, Mustache],
+ callstack: List[Any]
+ ): TokenProduct = {
+ val result = tasks.map(t => { t._1.render(t._2, partials, callstack) })
+ val len = result.foldLeft(0) { _ + _.maxLength }
+ new TokenProduct {
+ val maxLength: Int = len
+ def write(out: StringBuilder): Unit = result.foreach { _.write(out) }
+ }
+ }
+ }
+
+ case class RootToken(children: List[Token]) extends Token with CompositeToken {
+ private val childrenSource = children.map(_.templateSource).mkString
+
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ composite(children, context, partials, callstack)
+
+ def templateSource: String = childrenSource
+ }
+
+ case class IncompleteSection(key: String, inverted: Boolean, otag: String, ctag: String)
+ extends Token {
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ fail
+ def templateSource: String = fail
+
+ private def fail =
+ throw new Exception("Weird thing happened. There is incomplete section in compiled template.")
+
+ }
+
+ case class StaticTextToken(staticText: String) extends Token {
+ private val product = StringProduct(staticText)
+
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ product
+
+ def templateSource: String = staticText
+
+ }
+
+ case class ChangeDelimitersToken(
+ newOTag: String,
+ newCTag: String,
+ otag: String,
+ ctag: String
+ ) extends Token {
+ private val source = otag + "=" + newOTag + " " + newCTag + "=" + ctag
+
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ EmptyProduct
+
+ def templateSource: String = source
+
+ }
+
+ case class PartialToken(key: String, otag: String, ctag: String) extends Token {
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ partials.get(key) match {
+ case Some(template) => template.product(context, partials, template :: callstack)
+ case _ => throw new IllegalArgumentException("Partial \"" + key + "\" is not defined.")
+ }
+ def templateSource: String = otag + ">" + key + ctag
+ }
+
+ trait ContextHandler {
+
+ protected def defaultRender(
+ otag: String,
+ ctag: String
+ ): (Any, Map[String, Mustache], List[Any]) => String => String =
+ (context: Any, partials: Map[String, Mustache], callstack: List[Any]) =>
+ (str: String) => {
+ val t = new Mustache(str, otag, ctag)
+ t.render(context, partials, callstack)
+ }
+
+ def valueOf(
+ key: String,
+ context: Any,
+ partials: Map[String, Mustache],
+ callstack: List[Any],
+ childrenString: String,
+ render: (Any, Map[String, Mustache], List[Any]) => String => String
+ ): Any = {
+ val r = render(context, partials, callstack)
+
+ val wrappedEval =
+ callstack
+ .filter(_.isInstanceOf[Mustache])
+ .asInstanceOf[List[Mustache]]
+ .foldLeft(() => { eval(findInContext(context :: callstack, key), childrenString, r) })(
+ (f, e) => { () => { e.withContextAndRenderFn(context, r)(f()) } }
+ )
+ wrappedEval() match {
+ case None if key == "." => context
+ case other => other
+ }
+ }
+
+ @tailrec
+ private def eval(
+ value: Any,
+ childrenString: String,
+ render: String => String
+ ): Any =
+ value match {
+ case Some(someValue) => eval(someValue, childrenString, render)
+
+ case a: Awaitable[_] =>
+ eval(Await.result(a, Duration.Inf), childrenString, render)
+
+ case f: Function0[_] =>
+ eval(f(), childrenString, render)
+
+ case s: Seq[_] => s
+
+ case m: Map[_, _] => m
+
+ case f: Function1[String, _] =>
+ eval(f(childrenString), childrenString, render)
+
+ case f: Function2[String, Function1[String, String], _] =>
+ eval(f(childrenString, render), childrenString, render)
+
+ case other => other
+ }
+
+ @tailrec
+ private def findInContext(stack: List[Any], key: String): Any =
+ stack.headOption match {
+ case None => None
+ case Some(head) =>
+ (head match {
+ case null => None
+ case m: Map[String, _] =>
+ m.get(key) match {
+ case Some(v) => v
+ case None => None
+ }
+ case m: Mustache =>
+ m.globals.get(key) match {
+ case Some(v) => v
+ case None => None
+ }
+ case any => reflection(any, key)
+ }) match {
+ case None => findInContext(stack.tail, key)
+ case x => x
+ }
+ }
+
+ private def reflection(x: Any, key: String): Any = {
+ val w = wrapped(x)
+ (methods(w).get(key), fields(w).get(key)) match {
+ case (Some(m), _) => m.invoke(w)
+ case (None, Some(f)) => f.get(w)
+ case _ => None
+ }
+ }
+
+ private def fields(w: AnyRef) = Map(
+ w.getClass.getFields.toIndexedSeq.map(x => { x.getName -> x }): _*
+ )
+
+ private def methods(w: AnyRef) = Map(
+ w.getClass.getMethods
+ .filter(x => { x.getParameterTypes.isEmpty })
+ .toIndexedSeq.map(x => { x.getName -> x }): _*
+ )
+
+ private def wrapped(x: Any): AnyRef =
+ x match {
+ case x: Byte => byte2Byte(x)
+ case x: Short => short2Short(x)
+ case x: Char => char2Character(x)
+ case x: Int => int2Integer(x)
+ case x: Long => long2Long(x)
+ case x: Float => float2Float(x)
+ case x: Double => double2Double(x)
+ case x: Boolean => boolean2Boolean(x)
+ case _ => x.asInstanceOf[AnyRef]
+ }
+ }
+
+ trait ValuesFormatter {
+ @tailrec
+ final def format(value: Any): String =
+ value match {
+ case null => ""
+ case None => ""
+ case Some(v) => format(v)
+ case x => x.toString
+ }
+ }
+
+ case class SectionToken(
+ inverted: Boolean,
+ key: String,
+ children: List[Token],
+ startOTag: String,
+ startCTag: String,
+ endOTag: String,
+ endCTag: String
+ ) extends Token
+ with ContextHandler
+ with CompositeToken {
+
+ private val childrenSource = children.map(_.templateSource).mkString
+
+ private val source = startOTag + (if (inverted) "^" else "#") + key +
+ startCTag + childrenSource + endOTag + "/" + key + endCTag
+
+ private val childrenTemplate = {
+ val root =
+ if (children.size == 1) children.head
+ else RootToken(children)
+ new Mustache(root)
+ }
+
+ def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
+ valueOf(key, context, partials, callstack, childrenSource, renderContent) match {
+ case null =>
+ if (inverted) composite(children, context, partials, context :: callstack)
+ else EmptyProduct
+ case None =>
+ if (inverted) composite(children, context, partials, context :: callstack)
+ else EmptyProduct
+ case b: Boolean =>
+ if (b ^ inverted) composite(children, context, partials, context :: callstack)
+ else EmptyProduct
+ case s: Seq[_] if inverted =>
+ if (s.isEmpty) composite(children, context, partials, context :: callstack)
+ else EmptyProduct
+ case s: Seq[_] if !inverted =>
+ val tasks = for (element <- s; token <- children) yield (token, element)
+ composite(tasks, partials, context :: callstack)
+ case str: String =>
+ if (!inverted) StringProduct(str)
+ else EmptyProduct
+
+ case other =>
+ if (!inverted) composite(children, other, partials, context :: callstack)
+ else EmptyProduct
+ }
+
+ private def renderContent(context: Any, partials: Map[String, Mustache], callstack: List[Any])(
+ template: String
+ ): String =
+ // it will be children nodes in most cases
+ // TODO: some cache for dynamically generated templates?
+ if (template == childrenSource)
+ childrenTemplate.render(context, partials, context :: callstack)
+ else {
+ val t = new Mustache(template, startOTag, startCTag)
+ t.render(context, partials, context :: callstack)
+ }
+
+ def templateSource: String = source
+ }
+
+ case class UnescapedToken(key: String, otag: String, ctag: String)
+ extends Token
+ with ContextHandler
+ with ValuesFormatter {
+ private val source = otag + "&" + key + ctag
+
+ def render(
+ context: Any,
+ partials: Map[String, Mustache],
+ callstack: List[Any]
+ ): TokenProduct = {
+ val v = format(valueOf(key, context, partials, callstack, "", defaultRender(otag, ctag)))
+ new TokenProduct {
+ val maxLength: Int = v.length
+ def write(out: StringBuilder): Unit = { out.append(v) }
+ }
+ }
+
+ def templateSource: String = source
+ }
+
+ case class EscapedToken(key: String, otag: String, ctag: String)
+ extends Token
+ with ContextHandler
+ with ValuesFormatter {
+ private val source = otag + key + ctag
+
+ val transcode: Map[Char, String] = Map.empty
+// Map(
+// '<' -> "<",
+// '>' -> ">",
+// '"' -> """,
+// '&' -> "&"
+// )
+
+ def render(
+ context: Any,
+ partials: Map[String, Mustache],
+ callstack: List[Any]
+ ): TokenProduct = {
+ val v = format(valueOf(key, context, partials, callstack, "", defaultRender(otag, ctag)))
+ new TokenProduct {
+ val maxLength: Int = (v.length * 1.2).toInt
+ def write(out: StringBuilder): Unit =
+ v.foreach {
+ case t if transcode.contains(t) => out.append(transcode(t))
+ case c => out.append(c)
+ }
+ }
+ }
+
+ def templateSource: String = source
+ }
+
+ object Mustache {
+
+ def apply(path: String): Mustache = {
+ new Mustache(
+ Settings.MustacheRootPath match {
+ case Some(mustacheRootPath) =>
+ val file = s"$mustacheRootPath/$path"
+ if (new JFile(file).exists) {
+ Source.fromFile(file)
+ } else {
+ Source.fromInputStream(getClass.getClassLoader.getResourceAsStream(path))
+ }
+ case None =>
+ Source.fromInputStream(getClass.getClassLoader.getResourceAsStream(path))
+ }
+ )
+ }
+
+ }
diff --git a/common/src/main/scala/app/softnetwork/concurrent/package.scala b/common/src/main/scala/app/softnetwork/concurrent/package.scala
index 71602525..b76cb2e2 100644
--- a/common/src/main/scala/app/softnetwork/concurrent/package.scala
+++ b/common/src/main/scala/app/softnetwork/concurrent/package.scala
@@ -69,7 +69,7 @@ package object concurrent {
def retry(fn: => Future[T])(implicit ec: ExecutionContext): Future[T] = retry(nbTries)(fn)
def retry(n: Int)(fn: => Future[T])(implicit ec: ExecutionContext): Future[T] = {
- val p = Promise[T]
+ val p = Promise[T]()
fn onComplete {
case Success(x) => p.success(x)
case _ if n > 1 => p.completeWith(retry(n - 1)(fn))
diff --git a/common/src/test/scala/mustache/MustacheSpec.scala b/common/src/test/scala/mustache/MustacheSpec.scala
index 5101f11f..b6def31f 100644
--- a/common/src/test/scala/mustache/MustacheSpec.scala
+++ b/common/src/test/scala/mustache/MustacheSpec.scala
@@ -3,10 +3,6 @@ package mustache
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.should.Matchers
-import mustache._
-
-import scala.io.Source
-
/** Created by smanciot on 08/04/2018.
*/
class MustacheSpec extends AnyWordSpec with Matchers {
diff --git a/common/testkit/src/main/scala/app/softnetwork/concurrent/scalatest/CompletionTestKit.scala b/common/testkit/src/main/scala/app/softnetwork/concurrent/scalatest/CompletionTestKit.scala
index 3edd7d8a..8ee9dc81 100644
--- a/common/testkit/src/main/scala/app/softnetwork/concurrent/scalatest/CompletionTestKit.scala
+++ b/common/testkit/src/main/scala/app/softnetwork/concurrent/scalatest/CompletionTestKit.scala
@@ -12,8 +12,9 @@ import scala.util.{Failure, Success, Try}
import scala.language.reflectiveCalls
/** Created by smanciot on 12/04/2021.
- */
-trait CompletionTestKit extends Completion with Assertions { _: { def log: Logger } =>
+ */
+trait CompletionTestKit extends Completion with Assertions {
+ _: {def log: Logger} =>
implicit class AwaitAssertion[T](future: Future[T])(implicit atMost: Duration = defaultTimeout) {
def assert(fun: T => Assertion): Assertion =
@@ -42,13 +43,14 @@ trait CompletionTestKit extends Completion with Assertions { _: { def log: Logge
var done = false
while (tries <= maxTries && !done) {
- if (tries > 0) Thread.sleep(sleep * tries)
- tries = tries + 1
try {
+ tries = tries + 1
+ log.info(s"Waiting for $explain, try $tries/$maxTries")
+ Thread.sleep(sleep * tries)
done = predicate()
} catch {
case e: Throwable =>
- log.warn(s"problem while testing predicate ${e.getMessage}")
+ log.warn(s"problem while waiting for $explain: ${e.getMessage}")
}
}
diff --git a/core/build.sbt b/core/build.sbt
index 35a0b568..b8f3eda2 100644
--- a/core/build.sbt
+++ b/core/build.sbt
@@ -40,7 +40,7 @@ val kryo = Seq(
)
val chill = Seq(
- "com.twitter" % "chill-akka_2.12" % Versions.chill excludeAll ExclusionRule(organization = "com.typesafe.akka")
+ "com.twitter" %% "chill-akka" % Versions.chill excludeAll ExclusionRule(organization = "com.typesafe.akka")
)
val logback = Seq(
diff --git a/core/src/main/scala/app/softnetwork/persistence/package.scala b/core/src/main/scala/app/softnetwork/persistence/package.scala
index 7bd34e04..ecc5db2e 100644
--- a/core/src/main/scala/app/softnetwork/persistence/package.scala
+++ b/core/src/main/scala/app/softnetwork/persistence/package.scala
@@ -8,18 +8,19 @@ import java.time.Instant
import scala.language.implicitConversions
/** Created by smanciot on 13/04/2020.
- */
+ */
package object persistence {
trait ManifestWrapper[T] {
protected case class ManifestW()(implicit val wrapped: Manifest[T])
+
protected val manifestWrapper: ManifestW
}
def generateUUID(key: Option[String] = None): String =
key match {
case Some(clearText) => sha256(clearText)
- case _ => UUID.randomUUID().toString
+ case _ => UUID.randomUUID().toString
}
def now(): Date = Date.from(Instant.now())
@@ -29,8 +30,8 @@ package object persistence {
}
/** Used for akka and elastic persistence ids, one per targeted environment (development,
- * production, ...)
- */
+ * production, ...)
+ */
val version: String = sys.env.getOrElse("VERSION", PersistenceCoreBuildInfo.version)
val environment: String = sys.env.getOrElse(
diff --git a/core/src/main/scala/app/softnetwork/persistence/typed/scaladsl/Patterns.scala b/core/src/main/scala/app/softnetwork/persistence/typed/scaladsl/Patterns.scala
index cb24f2ab..e1ee42d6 100644
--- a/core/src/main/scala/app/softnetwork/persistence/typed/scaladsl/Patterns.scala
+++ b/core/src/main/scala/app/softnetwork/persistence/typed/scaladsl/Patterns.scala
@@ -151,7 +151,7 @@ trait SingletonPattern[C <: Command, R <: CommandResult]
val maybeSingletonRef = Option(singletonRef)
if (maybeSingletonRef.isEmpty) {
log.warn(s"actorRef for [$name] is undefined")
- system.receptionist ? Find(key) complete () match {
+ (system.receptionist ? Find(key)).complete() match {
case Success(s) => maybeActorRef = s.serviceInstances(key).headOption
case Failure(f) =>
log.error(f.getMessage, f)
@@ -164,7 +164,7 @@ trait SingletonPattern[C <: Command, R <: CommandResult]
log.info(s"spawn supervisor for singleton [$name]")
import app.softnetwork.persistence._
val supervisorRef = system.systemActorOf(supervisor, generateUUID())
- supervisorRef ? SingletonRef complete () match {
+ (supervisorRef ? SingletonRef).complete() match {
case Success(s) =>
maybeActorRef = Some(s.singletonRef)
log.info(s"actorRef for [$name] has been loaded -> ${s.singletonRef.path}")
diff --git a/core/src/test/scala/app/softnetwork/persistence/service/SingletonServiceSpec.scala b/core/src/test/scala/app/softnetwork/persistence/service/SingletonServiceSpec.scala
index 0388ee81..582eddd2 100644
--- a/core/src/test/scala/app/softnetwork/persistence/service/SingletonServiceSpec.scala
+++ b/core/src/test/scala/app/softnetwork/persistence/service/SingletonServiceSpec.scala
@@ -38,7 +38,7 @@ class SingletonServiceSpec
"SingletonService" must {
"run commands" in {
- run(TestSample) complete () match {
+ run(TestSample).complete() match {
case Success(s) =>
s match {
case SampleTested => log.info("sample tested !")
diff --git a/core/src/test/scala/app/softnetwork/persistence/typed/scaladsl/SingletonPatternSpec.scala b/core/src/test/scala/app/softnetwork/persistence/typed/scaladsl/SingletonPatternSpec.scala
index 339cddeb..78b60820 100644
--- a/core/src/test/scala/app/softnetwork/persistence/typed/scaladsl/SingletonPatternSpec.scala
+++ b/core/src/test/scala/app/softnetwork/persistence/typed/scaladsl/SingletonPatternSpec.scala
@@ -27,7 +27,7 @@ class SingletonPatternSpec extends SamplePattern with AnyWordSpecLike with Befor
implicit lazy val system: ActorSystem[Nothing] = testKit.system
- def ask(): Unit = this ? TestSample complete () match {
+ def ask(): Unit = (this ? TestSample).complete() match {
case Success(s) =>
s match {
case SampleTested => log.info("sample tested !")
diff --git a/core/testkit/src/main/resources/logback.xml b/core/testkit/src/main/resources/logback.xml
index 67b5ddff..a38273bf 100644
--- a/core/testkit/src/main/resources/logback.xml
+++ b/core/testkit/src/main/resources/logback.xml
@@ -32,6 +32,8 @@
+
+
diff --git a/core/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToJsonProcessorStream.scala b/core/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToJsonProcessorStream.scala
index 5a48d600..42151884 100644
--- a/core/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToJsonProcessorStream.scala
+++ b/core/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToJsonProcessorStream.scala
@@ -6,7 +6,7 @@ import app.softnetwork.persistence.query.{InMemoryJournalProvider, InMemoryOffse
import java.nio.file.{Files, Paths}
trait PersonToJsonProcessorStream
- extends PersonToExternalProcessorStream
+ extends PersonToExternalProcessorStream
with InMemoryJournalProvider
with InMemoryOffsetProvider
with JsonProvider[Person] {
diff --git a/core/testkit/src/main/scala/app/softnetwork/persistence/scalatest/PersistenceTestKit.scala b/core/testkit/src/main/scala/app/softnetwork/persistence/scalatest/PersistenceTestKit.scala
index b81b05fe..1d0d5f54 100644
--- a/core/testkit/src/main/scala/app/softnetwork/persistence/scalatest/PersistenceTestKit.scala
+++ b/core/testkit/src/main/scala/app/softnetwork/persistence/scalatest/PersistenceTestKit.scala
@@ -23,9 +23,9 @@ import scala.language.implicitConversions
import scala.reflect.ClassTag
/** Created by smanciot on 04/01/2020.
- */
+ */
trait PersistenceTestKit
- extends PersistenceGuardian
+ extends PersistenceGuardian
with BeforeAndAfterAll
with Eventually
with CompletionTestKit
@@ -61,91 +61,92 @@ trait PersistenceTestKit
}
/** @return
- * roles associated with this node
- */
+ * roles associated with this node
+ */
def roles: Seq[String] = Seq.empty
- final lazy val akka: String = s"""
- |akka {
- | stdout-loglevel = off // defaults to WARNING can be disabled with off. The stdout-loglevel is only in effect during system startup and shutdown
- | log-dead-letters-during-shutdown = on
- | loglevel = debug
- | log-dead-letters = on
- | log-config-on-start = off // Log the complete configuration at INFO level when the actor system is started
- | loggers = ["akka.event.slf4j.Slf4jLogger"]
- | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
- |}
- |
- |clustering.cluster.name = $systemName
- |
- |akka.cluster.roles = [${roles.mkString(",")}]
- |
- |akka.discovery {
- | config.services = {
- | $systemName = {
- | endpoints = [
- | {
- | host = "$hostname"
- | port = $managementPort
- | }
- | ]
- | }
- | }
- |}
- |
- |akka.management {
- | http {
- | hostname = $hostname
- | port = $managementPort
- | }
- | cluster.bootstrap {
- | contact-point-discovery {
- | service-name = $systemName
- | }
- | }
- |}
- |
- |akka.remote.artery.canonical.hostname = $hostname
- |akka.remote.artery.canonical.port = 0
- |
- |akka.coordinated-shutdown.exit-jvm = off
- |
- |akka.actor.testkit.typed {
- | # Factor by which to scale timeouts during tests, e.g. to account for shared
- | # build system load.
- | timefactor = 1.0
- |
- | # Duration to wait in expectMsg and friends outside of within() block
- | # by default.
- | # Dilated by the timefactor.
- | single-expect-default = 10s
- |
- | # Duration to wait in expectNoMessage by default.
- | # Dilated by the timefactor.
- | expect-no-message-default = 1000ms
- |
- | # The timeout that is used as an implicit Timeout.
- | # Dilated by the timefactor.
- | default-timeout = 5s
- |
- | # Default timeout for shutting down the actor system (used when no explicit timeout specified).
- | # Dilated by the timefactor.
- | system-shutdown-default=60s
- |
- | # Throw an exception on shutdown if the timeout is hit, if false an error is printed to stdout instead.
- | throw-on-shutdown-timeout=false
- |
- | # Duration to wait for all required logging events in LoggingTestKit.expect.
- | # Dilated by the timefactor.
- | filter-leeway = 3s
- |
- |}
- |
- |""".stripMargin + additionalConfig
+ final lazy val akka: String =
+ s"""
+ |akka {
+ | stdout-loglevel = off // defaults to WARNING can be disabled with off. The stdout-loglevel is only in effect during system startup and shutdown
+ | log-dead-letters-during-shutdown = on
+ | loglevel = debug
+ | log-dead-letters = on
+ | log-config-on-start = off // Log the complete configuration at INFO level when the actor system is started
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
+ |}
+ |
+ |clustering.cluster.name = $systemName
+ |
+ |akka.cluster.roles = [${roles.mkString(",")}]
+ |
+ |akka.discovery {
+ | config.services = {
+ | $systemName = {
+ | endpoints = [
+ | {
+ | host = "$hostname"
+ | port = $managementPort
+ | }
+ | ]
+ | }
+ | }
+ |}
+ |
+ |akka.management {
+ | http {
+ | hostname = $hostname
+ | port = $managementPort
+ | }
+ | cluster.bootstrap {
+ | contact-point-discovery {
+ | service-name = $systemName
+ | }
+ | }
+ |}
+ |
+ |akka.remote.artery.canonical.hostname = $hostname
+ |akka.remote.artery.canonical.port = 0
+ |
+ |akka.coordinated-shutdown.exit-jvm = off
+ |
+ |akka.actor.testkit.typed {
+ | # Factor by which to scale timeouts during tests, e.g. to account for shared
+ | # build system load.
+ | timefactor = 1.0
+ |
+ | # Duration to wait in expectMsg and friends outside of within() block
+ | # by default.
+ | # Dilated by the timefactor.
+ | single-expect-default = 10s
+ |
+ | # Duration to wait in expectNoMessage by default.
+ | # Dilated by the timefactor.
+ | expect-no-message-default = 1000ms
+ |
+ | # The timeout that is used as an implicit Timeout.
+ | # Dilated by the timefactor.
+ | default-timeout = 5s
+ |
+ | # Default timeout for shutting down the actor system (used when no explicit timeout specified).
+ | # Dilated by the timefactor.
+ | system-shutdown-default=60s
+ |
+ | # Throw an exception on shutdown if the timeout is hit, if false an error is printed to stdout instead.
+ | throw-on-shutdown-timeout=false
+ |
+ | # Duration to wait for all required logging events in LoggingTestKit.expect.
+ | # Dilated by the timefactor.
+ | filter-leeway = 3s
+ |
+ |}
+ |
+ |""".stripMargin + additionalConfig
/** @return
- * additional configuration
- */
+ * additional configuration
+ */
def additionalConfig: String = ""
lazy val akkaConfig: Config = ConfigFactory.parseString(akka)
@@ -159,7 +160,7 @@ trait PersistenceTestKit
def typedSystem(): ActorSystem[Nothing] = system
/** `PatienceConfig` from [[_root_.akka.actor.testkit.typed.TestKitSettings#DefaultTimeout]]
- */
+ */
implicit val patience: PatienceConfig =
PatienceConfig(Settings.DefaultTimeout, Span(100, org.scalatest.time.Millis))
@@ -174,11 +175,11 @@ trait PersistenceTestKit
}
/** init and join cluster
- */
+ */
final def initAndJoinCluster(): Unit = {
testKit.spawn(setup(), "guardian")
// let the nodes join and become Up
- blockUntil("let the nodes join and become Up", 30, 2000)(() =>
+ blockUntil("the nodes join and become Up", 30, 2000)(() =>
Cluster(system).selfMember.status == MemberStatus.Up
)
}
diff --git a/counter/src/test/resources/application.conf b/counter/src/test/resources/application.conf
new file mode 100644
index 00000000..f01c7112
--- /dev/null
+++ b/counter/src/test/resources/application.conf
@@ -0,0 +1,3 @@
+akka.cluster.distributed-data.durable {
+ keys = []
+}
\ No newline at end of file
diff --git a/elastic/build.sbt b/elastic/build.sbt
deleted file mode 100644
index 1e43fe17..00000000
--- a/elastic/build.sbt
+++ /dev/null
@@ -1,34 +0,0 @@
-Test / parallelExecution := false
-
-organization := "app.softnetwork.persistence"
-
-name := "persistence-elastic"
-
-val elastic = Seq(
- "com.sksamuel.elastic4s" %% "elastic4s-core" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-http" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "org.elasticsearch" % "elasticsearch" % Versions.elasticSearch exclude ("org.apache.logging.log4j", "log4j-api"),
- "com.sksamuel.elastic4s" %% "elastic4s-testkit" % Versions.elastic4s % Test exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-embedded" % Versions.elastic4s % Test exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-http" % Versions.elastic4s % Test exclude ("org.elasticsearch", "elasticsearch"),
- "org.elasticsearch" % "elasticsearch" % Versions.elasticSearch % Test exclude ("org.apache.logging.log4j", "log4j-api"),
- "org.apache.logging.log4j" % "log4j-api" % Versions.log4j % Test,
- "org.apache.logging.log4j" % "log4j-slf4j-impl" % Versions.log4j % Test,
- "org.apache.logging.log4j" % "log4j-core" % Versions.log4j % Test
-)
-
-val httpComponentsExclusions = Seq(
- ExclusionRule(organization = "org.apache.httpcomponents", name = "httpclient", artifact = "*", configurations = Vector(ConfigRef("test")), crossVersion = CrossVersion.disabled )
-)
-
-val guavaExclusion = ExclusionRule(organization = "com.google.guava", name="guava")
-
-val jest = Seq(
- "io.searchbox" % "jest" % Versions.jest
-).map(_.excludeAll(httpComponentsExclusions ++ Seq(guavaExclusion): _*))
-
-libraryDependencies ++= elastic ++ jest ++ Seq(
- "javax.activation" % "activation" % "1.1.1" % Test
-)
-
-Compile / unmanagedResourceDirectories += baseDirectory.value / "src/main/protobuf"
diff --git a/elastic/src/main/resources/mapping/default.mustache b/elastic/src/main/resources/mapping/default.mustache
deleted file mode 100644
index f3e19d45..00000000
--- a/elastic/src/main/resources/mapping/default.mustache
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "{{type}}": {
- "properties": {
- "uuid": {
- "type": "keyword",
- "index": true
- },
- "createdDate": {
- "type": "date"
- },
- "lastUpdated": {
- "type": "date"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/elastic/src/main/resources/softnetwork-elastic.conf b/elastic/src/main/resources/softnetwork-elastic.conf
deleted file mode 100644
index d884e512..00000000
--- a/elastic/src/main/resources/softnetwork-elastic.conf
+++ /dev/null
@@ -1,21 +0,0 @@
-elastic {
- ip = "localhost"
- ip = ${?ELASTIC_IP}
- port = 9200
- port = ${?ELASTIC_PORT}
-
- credentials {
- url = "http://"${elastic.ip}":"${elastic.port}
- username = ""
- password = ""
-
- url = ${?ELASTIC_CREDENTIALS_URL}
- username = ${?ELASTIC_CREDENTIALS_USERNAME}
- password = ${?ELASTIC_CREDENTIALS_PASSWORD}
-
- }
-
- multithreaded = true
- discovery-enabled = false
-
-}
\ No newline at end of file
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/ElasticClientApi.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/ElasticClientApi.scala
deleted file mode 100644
index 4a87f7b7..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/ElasticClientApi.scala
+++ /dev/null
@@ -1,507 +0,0 @@
-package app.softnetwork.elastic.client
-
-import java.time.LocalDate
-import java.time.format.DateTimeFormatter
-import akka.NotUsed
-import akka.actor.ActorSystem
-import _root_.akka.stream.{FlowShape, Materializer}
-import akka.stream.scaladsl._
-import app.softnetwork.persistence.message.CountResponse
-import app.softnetwork.persistence.model.Timestamped
-import app.softnetwork.serialization._
-import app.softnetwork.elastic.sql.{SQLQueries, SQLQuery}
-import com.typesafe.config.{Config, ConfigFactory}
-import org.json4s.{DefaultFormats, Formats}
-import org.json4s.jackson.JsonMethods._
-
-import scala.collection.immutable.Seq
-import scala.concurrent.{Await, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
-import scala.language.{implicitConversions, postfixOps}
-import scala.reflect.ClassTag
-
-/** Created by smanciot on 28/06/2018.
- */
-trait ElasticClientApi
- extends IndicesApi
- with UpdateSettingsApi
- with AliasApi
- with MappingApi
- with CountApi
- with SearchApi
- with IndexApi
- with UpdateApi
- with GetApi
- with BulkApi
- with DeleteApi
- with RefreshApi
- with FlushApi {
-
- def config: Config = ConfigFactory.load()
-
- final lazy val elasticConfig: ElasticConfig = ElasticConfig(config)
-}
-
-trait IndicesApi {
- val defaultSettings: String =
- """
- |{
- | "index": {
- | "max_ngram_diff": "20",
- | "mapping" : {
- | "total_fields" : {
- | "limit" : "2000"
- | }
- | },
- | "analysis": {
- | "analyzer": {
- | "ngram_analyzer": {
- | "tokenizer": "ngram_tokenizer",
- | "filter": [
- | "lowercase",
- | "asciifolding"
- | ]
- | },
- | "search_analyzer": {
- | "type": "custom",
- | "tokenizer": "standard",
- | "filter": [
- | "lowercase",
- | "asciifolding"
- | ]
- | }
- | },
- | "tokenizer": {
- | "ngram_tokenizer": {
- | "type": "ngram",
- | "min_gram": 1,
- | "max_gram": 20,
- | "token_chars": [
- | "letter",
- | "digit"
- | ]
- | }
- | }
- | }
- | }
- |}
- """.stripMargin
-
- def createIndex(index: String, settings: String = defaultSettings): Boolean
-
- def deleteIndex(index: String): Boolean
-
- def closeIndex(index: String): Boolean
-
- def openIndex(index: String): Boolean
-}
-
-trait AliasApi {
- def addAlias(index: String, alias: String): Boolean
-}
-
-trait UpdateSettingsApi { _: IndicesApi =>
- def toggleRefresh(index: String, enable: Boolean): Unit = {
- updateSettings(
- index,
- if (!enable) """{"index" : {"refresh_interval" : -1} }"""
- else """{"index" : {"refresh_interval" : "1s"} }"""
- )
- }
-
- def setReplicas(index: String, replicas: Int): Unit = {
- updateSettings(index, s"""{"index" : {"number_of_replicas" : $replicas} }""")
- }
-
- def updateSettings(index: String, settings: String = defaultSettings): Boolean
-}
-
-trait MappingApi {
- def setMapping(index: String, _type: String, mapping: String): Boolean
-}
-
-trait RefreshApi {
- def refresh(index: String): Boolean
-}
-
-trait FlushApi {
- def flush(index: String, force: Boolean = true, wait: Boolean = true): Boolean
-}
-
-trait IndexApi {
- def index[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U], formats: Formats): Boolean = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- this.index(index.getOrElse(_type), _type, entity.uuid, serialization.write[U](entity))
- }
-
- def index(index: String, _type: String, id: String, source: String): Boolean
-
- def indexAsync[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U], ec: ExecutionContext, formats: Formats): Future[Boolean] = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- indexAsync(index.getOrElse(_type), _type, entity.uuid, serialization.write[U](entity))
- }
-
- def indexAsync(index: String, _type: String, id: String, source: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean]
-}
-
-trait UpdateApi {
- def update[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None,
- upsert: Boolean = true
- )(implicit u: ClassTag[U], formats: Formats): Boolean = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- this.update(index.getOrElse(_type), _type, entity.uuid, serialization.write[U](entity), upsert)
- }
-
- def update(index: String, _type: String, id: String, source: String, upsert: Boolean): Boolean
-
- def updateAsync[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None,
- upsert: Boolean = true
- )(implicit u: ClassTag[U], ec: ExecutionContext, formats: Formats): Future[Boolean] = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- this.updateAsync(
- index.getOrElse(_type),
- _type,
- entity.uuid,
- serialization.write[U](entity),
- upsert
- )
- }
-
- def updateAsync(index: String, _type: String, id: String, source: String, upsert: Boolean)(
- implicit ec: ExecutionContext
- ): Future[Boolean]
-}
-
-trait DeleteApi {
- def delete[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U]): Boolean = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- delete(entity.uuid, index.getOrElse(_type), _type)
- }
-
- def delete(uuid: String, index: String, _type: String): Boolean
-
- def deleteAsync[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U], ec: ExecutionContext): Future[Boolean] = {
- val _type = maybeType.getOrElse(u.runtimeClass.getSimpleName.toLowerCase)
- deleteAsync(entity.uuid, index.getOrElse(_type), _type)
- }
-
- def deleteAsync(uuid: String, index: String, _type: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean]
-
-}
-
-trait BulkApi { _: RefreshApi with UpdateSettingsApi =>
- type A
- type R
-
- def toBulkAction(bulkItem: BulkItem): A
-
- implicit def toBulkElasticAction(a: A): BulkElasticAction
-
- implicit def toBulkElasticResult(r: R): BulkElasticResult
-
- def bulk(implicit bulkOptions: BulkOptions, system: ActorSystem): Flow[Seq[A], R, NotUsed]
-
- def bulkResult: Flow[R, Set[String], NotUsed]
-
- /** +----------+
- * | |
- * | Source | items: Iterator[D]
- * | |
- * +----------+
- * |
- * v
- * +----------+
- * | |
- * |transform | BulkableAction
- * | |
- * +----------+
- * |
- * v
- * +----------+
- * | |
- * | settings | Update elasticsearch settings (refresh and replicas)
- * | |
- * +----------+
- * |
- * v
- * +----------+
- * | |
- * | group |
- * | |
- * +----------+
- * |
- * v
- * +----------+ +----------+
- * | |------->| |
- * | balance | | bulk |
- * | |------->| |
- * +----------+ +----------+
- * | |
- * | |
- * | |
- * +---------+ | |
- * | |<-----------' |
- * | merge | |
- * | |<----------------'
- * +---------+
- * |
- * v
- * +----------+
- * | |
- * | result | BulkResult
- * | |
- * +----------+
- * |
- * v
- * +----------+
- * | |
- * | Sink | indices: Set[String]
- * | |
- * +----------+
- *
- * Asynchronously bulk items to Elasticsearch
- *
- * @param items the items for which a bulk has to be performed
- * @param toDocument the function to transform items to elastic documents in json format
- * @param idKey the key mapping to the document id
- * @param suffixDateKey the key mapping to the date used to suffix the index
- * @param suffixDatePattern the date pattern used to suffix the index
- * @param update whether to upsert or not the items
- * @param delete whether to delete or not the items
- * @param parentIdKey the key mapping to the elastic parent document id
- * @param bulkOptions bulk options
- * @param system actor system
- * @tparam D the type of the items
- * @return the indexes on which the documents have been indexed
- */
- def bulk[D](
- items: Iterator[D],
- toDocument: D => String,
- idKey: Option[String] = None,
- suffixDateKey: Option[String] = None,
- suffixDatePattern: Option[String] = None,
- update: Option[Boolean] = None,
- delete: Option[Boolean] = None,
- parentIdKey: Option[String] = None
- )(implicit bulkOptions: BulkOptions, system: ActorSystem): Set[String] = {
-
- implicit val materializer: Materializer = Materializer(system)
-
- import GraphDSL.Implicits._
-
- val source = Source.fromIterator(() => items)
-
- val sink = Sink.fold[Set[String], Set[String]](Set.empty[String])(_ ++ _)
-
- val g = Flow.fromGraph(GraphDSL.create() { implicit b =>
- val transform =
- b.add(
- Flow[D].map(item =>
- toBulkAction(
- toBulkItem(
- toDocument,
- idKey,
- suffixDateKey,
- suffixDatePattern,
- update,
- delete,
- parentIdKey,
- item
- )
- )
- )
- )
-
- val settings = b.add(BulkSettings[A](bulkOptions.disableRefresh)(this, toBulkElasticAction))
-
- val group = b.add(Flow[A].named("group").grouped(bulkOptions.maxBulkSize).map { items =>
-// logger.info(s"Preparing to write batch of ${items.size}...")
- items
- })
-
- val parallelism = Math.max(1, bulkOptions.balance)
-
- val bulkFlow: FlowShape[Seq[A], R] = b.add(bulk)
-
- val result = b.add(bulkResult)
-
- if (parallelism > 1) {
- val balancer = b.add(Balance[Seq[A]](parallelism))
-
- val merge = b.add(Merge[R](parallelism))
-
- transform ~> settings ~> group ~> balancer
-
- 1 to parallelism foreach { _ =>
- balancer ~> bulkFlow ~> merge
- }
-
- merge ~> result
- } else {
- transform ~> settings ~> group ~> bulkFlow ~> result
- }
-
- FlowShape(transform.in, result.out)
- })
-
- val future = source.via(g).toMat(sink)(Keep.right).run()
-
- val indices = Await.result(future, Duration.Inf)
- indices.foreach(refresh)
- indices
- }
-
- def toBulkItem[D](
- toDocument: D => String,
- idKey: Option[String],
- suffixDateKey: Option[String],
- suffixDatePattern: Option[String],
- update: Option[Boolean],
- delete: Option[Boolean],
- parentIdKey: Option[String],
- item: D
- )(implicit bulkOptions: BulkOptions): BulkItem = {
-
- implicit val formats: DefaultFormats = org.json4s.DefaultFormats
- val document = toDocument(item)
- val jsonMap = parse(document, useBigDecimalForDouble = false).extract[Map[String, Any]]
- // extract id
- val id = idKey.flatMap { i =>
- jsonMap.get(i).map(_.toString)
- }
-
- // extract final index name
- val index = suffixDateKey
- .flatMap { s =>
- // Expecting a date field YYYY-MM-dd ...
- jsonMap.get(s).map { d =>
- val strDate = d.toString.substring(0, 10)
- val date = LocalDate.parse(strDate, DateTimeFormatter.ofPattern("yyyy-MM-dd"))
- date.format(
- suffixDatePattern
- .map(DateTimeFormatter.ofPattern)
- .getOrElse(DateTimeFormatter.ofPattern("yyyy-MM-dd"))
- )
- }
- }
- .map(s => s"${bulkOptions.index}-$s")
- // use suffix if available otherwise only index
- .getOrElse(bulkOptions.index)
-
- // extract parent key
- val parent = parentIdKey.flatMap { i =>
- jsonMap.get(i).map(_.toString)
- }
-
- val action = delete match {
- case Some(d) if d => BulkAction.DELETE
- case _ =>
- update match {
- case Some(u) if u => BulkAction.UPDATE
- case _ => BulkAction.INDEX
- }
- }
-
- val body = action match {
- case BulkAction.UPDATE => docAsUpsert(document)
- case _ => document
- }
-
- BulkItem(index, action, body, id, parent)
- }
-
-}
-
-trait CountApi {
- def countAsync(query: JSONQuery)(implicit ec: ExecutionContext): Future[Option[Double]]
-
- def count(query: JSONQuery): Option[Double]
-
- def countAsync(sqlQuery: SQLQuery)(implicit
- ec: ExecutionContext
- ): Future[_root_.scala.collection.Seq[CountResponse]]
-}
-
-trait GetApi {
- def get[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], formats: Formats): Option[U]
-
- def getAsync[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[Option[U]]
-}
-
-trait SearchApi {
-
- def search[U](jsonQuery: JSONQuery)(implicit m: Manifest[U], formats: Formats): List[U]
-
- def search[U](sqlQuery: SQLQuery)(implicit m: Manifest[U], formats: Formats): List[U]
-
- def searchAsync[U](
- sqlQuery: SQLQuery
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[List[U]]
-
- def searchWithInnerHits[U, I](sqlQuery: SQLQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])]
-
- def searchWithInnerHits[U, I](jsonQuery: JSONQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])]
-
- def multiSearch[U](
- sqlQueries: SQLQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]]
-
- def multiSearch[U](
- jsonQueries: JSONQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]]
-
- def multiSearchWithInnerHits[U, I](sqlQueries: SQLQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]]
-
- def multiSearchWithInnerHits[U, I](jsonQueries: JSONQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]]
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientApi.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientApi.scala
deleted file mode 100644
index 0324f69e..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientApi.scala
+++ /dev/null
@@ -1,680 +0,0 @@
-package app.softnetwork.elastic.client.jest
-
-import akka.NotUsed
-import akka.actor.ActorSystem
-import akka.stream.scaladsl.Flow
-import app.softnetwork.elastic.client._
-import app.softnetwork.elastic.sql.{ElasticQuery, SQLQueries, SQLQuery}
-import app.softnetwork.persistence.message.CountResponse
-import app.softnetwork.persistence.model.Timestamped
-import app.softnetwork.serialization._
-import io.searchbox.action.BulkableAction
-import io.searchbox.core._
-import io.searchbox.core.search.aggregation.RootAggregation
-import io.searchbox.indices.aliases.{AddAliasMapping, ModifyAliases}
-import io.searchbox.indices.mapping.PutMapping
-import io.searchbox.indices.settings.UpdateSettings
-import io.searchbox.indices._
-import io.searchbox.params.Parameters
-import org.json4s.Formats
-
-import scala.collection.JavaConverters._
-import scala.collection.immutable.Seq
-import scala.concurrent.{ExecutionContext, Future, Promise}
-import scala.language.implicitConversions
-import scala.util.{Failure, Success, Try}
-
-/** Created by smanciot on 20/05/2021.
- */
-trait JestClientApi
- extends ElasticClientApi
- with JestIndicesApi
- with JestAliasApi
- with JestUpdateSettingsApi
- with JestMappingApi
- with JestRefreshApi
- with JestFlushApi
- with JestCountApi
- with JestIndexApi
- with JestUpdateApi
- with JestDeleteApi
- with JestGetApi
- with JestSearchApi
- with JestBulkApi
-
-trait JestIndicesApi extends IndicesApi with JestClientCompanion {
- override def createIndex(index: String, settings: String = defaultSettings): Boolean =
- apply().execute(new CreateIndex.Builder(index).settings(settings).build()).isSucceeded
- override def deleteIndex(index: String): Boolean =
- apply().execute(new DeleteIndex.Builder(index).build()).isSucceeded
- override def closeIndex(index: String): Boolean =
- apply().execute(new CloseIndex.Builder(index).build()).isSucceeded
- override def openIndex(index: String): Boolean =
- apply().execute(new OpenIndex.Builder(index).build()).isSucceeded
-}
-
-trait JestAliasApi extends AliasApi with JestClientCompanion {
- override def addAlias(index: String, alias: String): Boolean = {
- apply()
- .execute(
- new ModifyAliases.Builder(
- new AddAliasMapping.Builder(index, alias).build()
- ).build()
- )
- .isSucceeded
- }
-}
-
-trait JestUpdateSettingsApi extends UpdateSettingsApi with JestClientCompanion { _: IndicesApi =>
- override def updateSettings(index: String, settings: String = defaultSettings): Boolean =
- closeIndex(index) &&
- apply().execute(new UpdateSettings.Builder(settings).addIndex(index).build()).isSucceeded &&
- openIndex(index)
-}
-
-trait JestMappingApi extends MappingApi with JestClientCompanion {
- override def setMapping(index: String, _type: String, mapping: String): Boolean =
- apply().execute(new PutMapping.Builder(index, _type, mapping).build()).isSucceeded
-}
-
-trait JestRefreshApi extends RefreshApi with JestClientCompanion {
- override def refresh(index: String): Boolean =
- apply().execute(new Refresh.Builder().addIndex(index).build()).isSucceeded
-}
-
-trait JestFlushApi extends FlushApi with JestClientCompanion {
- override def flush(index: String, force: Boolean = true, wait: Boolean = true): Boolean = apply()
- .execute(
- new Flush.Builder().addIndex(index).force(force).waitIfOngoing(wait).build()
- )
- .isSucceeded
-}
-
-trait JestCountApi extends CountApi with JestClientCompanion {
- override def countAsync(
- jsonQuery: JSONQuery
- )(implicit ec: ExecutionContext): Future[Option[Double]] = {
- import JestClientResultHandler._
- import jsonQuery._
- val count = new Count.Builder().query(query)
- for (indice <- indices) count.addIndex(indice)
- for (t <- types) count.addType(t)
- val promise = Promise[Option[Double]]()
- apply().executeAsyncPromise(count.build()) onComplete {
- case Success(result) =>
- if (!result.isSucceeded)
- logger.error(result.getErrorMessage)
- promise.success(Option(result.getCount))
- case Failure(f) =>
- logger.error(f.getMessage, f)
- promise.failure(f)
- }
- promise.future
- }
-
- override def count(jsonQuery: JSONQuery): Option[Double] = {
- import jsonQuery._
- val count = new Count.Builder().query(query)
- for (indice <- indices) count.addIndex(indice)
- for (t <- types) count.addType(t)
- val result = apply().execute(count.build())
- if (!result.isSucceeded)
- logger.error(result.getErrorMessage)
- Option(result.getCount)
- }
-
- override def countAsync(
- sqlQuery: SQLQuery
- )(implicit ec: ExecutionContext): Future[_root_.scala.collection.Seq[CountResponse]] = {
- val futures = for (elasticCount <- ElasticQuery.count(sqlQuery)) yield {
- val promise: Promise[CountResponse] = Promise()
- import collection.immutable.Seq
- val _field = elasticCount.field
- val _sourceField = elasticCount.sourceField
- val _agg = elasticCount.agg
- val _query = elasticCount.query
- val _sources = elasticCount.sources
- _sourceField match {
- case "_id" =>
- countAsync(
- JSONQuery(_query, Seq(_sources: _*), Seq.empty[String])
- ).onComplete {
- case Success(result) =>
- promise.success(CountResponse(_field, result.getOrElse(0d).toInt, None))
- case Failure(f) =>
- logger.error(f.getMessage, f.fillInStackTrace())
- promise.success(CountResponse(_field, 0, Some(f.getMessage)))
- }
- case _ =>
- import JestClientApi._
- import JestClientResultHandler._
- apply()
- .executeAsyncPromise(JSONQuery(_query, Seq(_sources: _*), Seq.empty[String]).search)
- .onComplete {
- case Success(result) =>
- val agg = _agg.split("\\.").last
-
- val itAgg = _agg.split("\\.").iterator
-
- var root =
- if (elasticCount.nested)
- result.getAggregations.getAggregation(itAgg.next(), classOf[RootAggregation])
- else
- result.getAggregations
-
- if (elasticCount.filtered) {
- root = root.getAggregation(itAgg.next(), classOf[RootAggregation])
- }
-
- promise.success(
- CountResponse(
- _field,
- if (elasticCount.distinct)
- root.getCardinalityAggregation(agg).getCardinality.toInt
- else
- root.getValueCountAggregation(agg).getValueCount.toInt,
- None
- )
- )
-
- case Failure(f) =>
- logger.error(f.getMessage, f.fillInStackTrace())
- promise.success(CountResponse(_field, 0, Some(f.getMessage)))
- }
- }
- promise.future
- }
- Future.sequence(futures)
- }
-}
-
-trait JestIndexApi extends IndexApi with JestClientCompanion {
- override def index(index: String, _type: String, id: String, source: String): Boolean = {
- Try(
- apply().execute(
- new Index.Builder(source).index(index).`type`(_type).id(id).build()
- )
- ) match {
- case Success(s) =>
- if (!s.isSucceeded)
- logger.error(s.getErrorMessage)
- s.isSucceeded
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- override def indexAsync(index: String, _type: String, id: String, source: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean] = {
- import JestClientResultHandler._
- val promise: Promise[Boolean] = Promise()
- apply().executeAsyncPromise(
- new Index.Builder(source).index(index).`type`(_type).id(id).build()
- ) onComplete {
- case Success(s) => promise.success(s.isSucceeded)
- case Failure(f) =>
- logger.error(f.getMessage, f)
- promise.failure(f)
- }
- promise.future
- }
-
-}
-
-trait JestUpdateApi extends UpdateApi with JestClientCompanion {
- override def update(
- index: String,
- _type: String,
- id: String,
- source: String,
- upsert: Boolean
- ): Boolean = {
- Try(
- apply().execute(
- new Update.Builder(
- if (upsert)
- docAsUpsert(source)
- else
- source
- ).index(index).`type`(_type).id(id).build()
- )
- ) match {
- case Success(s) =>
- if (!s.isSucceeded)
- logger.error(s.getErrorMessage)
- s.isSucceeded
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- override def updateAsync(
- index: String,
- _type: String,
- id: String,
- source: String,
- upsert: Boolean
- )(implicit ec: ExecutionContext): Future[Boolean] = {
- import JestClientResultHandler._
- val promise: Promise[Boolean] = Promise()
- apply().executeAsyncPromise(
- new Update.Builder(
- if (upsert)
- docAsUpsert(source)
- else
- source
- ).index(index).`type`(_type).id(id).build()
- ) onComplete {
- case Success(s) =>
- if (!s.isSucceeded)
- logger.error(s.getErrorMessage)
- promise.success(s.isSucceeded)
- case Failure(f) =>
- logger.error(f.getMessage, f)
- promise.failure(f)
- }
- promise.future
- }
-
-}
-
-trait JestDeleteApi extends DeleteApi with JestClientCompanion {
- override def delete(uuid: String, index: String, _type: String): Boolean = {
- val result = apply().execute(
- new Delete.Builder(uuid).index(index).`type`(_type).build()
- )
- if (!result.isSucceeded) {
- logger.error(result.getErrorMessage)
- }
- result.isSucceeded
- }
-
- override def deleteAsync(uuid: String, index: String, _type: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean] = {
- import JestClientResultHandler._
- val promise: Promise[Boolean] = Promise()
- apply().executeAsyncPromise(
- new Delete.Builder(uuid).index(index).`type`(_type).build()
- ) onComplete {
- case Success(s) =>
- if (!s.isSucceeded)
- logger.error(s.getErrorMessage)
- promise.success(s.isSucceeded)
- case Failure(f) =>
- logger.error(f.getMessage, f)
- promise.failure(f)
- }
- promise.future
- }
-
-}
-
-trait JestGetApi extends GetApi with JestClientCompanion {
-
- // GetApi
- override def get[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], formats: Formats): Option[U] = {
- val result = apply().execute(
- new Get.Builder(
- index.getOrElse(
- maybeType.getOrElse(
- m.runtimeClass.getSimpleName.toLowerCase
- )
- ),
- id
- ).build()
- )
- if (result.isSucceeded) {
- Some(serialization.read[U](result.getSourceAsString))
- } else {
- logger.error(result.getErrorMessage)
- None
- }
- }
-
- override def getAsync[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[Option[U]] = {
- import JestClientResultHandler._
- val promise: Promise[Option[U]] = Promise()
- apply().executeAsyncPromise(
- new Get.Builder(
- index.getOrElse(
- maybeType.getOrElse(
- m.runtimeClass.getSimpleName.toLowerCase
- )
- ),
- id
- ).build()
- ) onComplete {
- case Success(result) =>
- if (result.isSucceeded)
- promise.success(Some(serialization.read[U](result.getSourceAsString)))
- else {
- logger.error(result.getErrorMessage)
- promise.success(None)
- }
- case Failure(f) =>
- logger.error(f.getMessage, f)
- promise.failure(f)
- }
- promise.future
- }
-
-}
-
-trait JestSearchApi extends SearchApi with JestClientCompanion {
-
- import JestClientApi._
-
- override def search[U](
- jsonQuery: JSONQuery
- )(implicit m: Manifest[U], formats: Formats): List[U] = {
- import jsonQuery._
- val search = new Search.Builder(query)
- for (indice <- indices) search.addIndex(indice)
- for (t <- types) search.addType(t)
- Try(
- apply()
- .execute(search.build())
- .getSourceAsStringList
- .asScala
- .map(source => serialization.read[U](source))
- .toList
- ) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- List.empty
- }
- }
-
- override def search[U](sqlQuery: SQLQuery)(implicit m: Manifest[U], formats: Formats): List[U] = {
- val search: Option[Search] = sqlQuery.search
- (search match {
- case Some(s) =>
- val result = apply().execute(s)
- if (result.isSucceeded) {
- Some(result)
- } else {
- logger.error(result.getErrorMessage)
- None
- }
- case _ => None
- }) match {
- case Some(searchResult) =>
- Try(
- searchResult.getSourceAsStringList.asScala
- .map(source => serialization.read[U](source))
- .toList
- ) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- List.empty
- }
- case _ => List.empty
- }
- }
-
- override def searchAsync[U](
- sqlQuery: SQLQuery
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[List[U]] = {
- val promise = Promise[List[U]]()
- val search: Option[Search] = sqlQuery.search
- search match {
- case Some(s) =>
- import JestClientResultHandler._
- apply().executeAsyncPromise(s) onComplete {
- case Success(searchResult) =>
- promise.success(
- searchResult.getSourceAsStringList.asScala
- .map(source => serialization.read[U](source))
- .toList
- )
- case Failure(f) =>
- promise.failure(f)
- }
- case _ => promise.success(List.empty)
- }
- promise.future
- }
-
- override def searchWithInnerHits[U, I](sqlQuery: SQLQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])] = {
- val search: Option[Search] = sqlQuery.search
- (search match {
- case Some(s) =>
- val result = apply().execute(s)
- if (result.isSucceeded) {
- Some(result)
- } else {
- logger.error(result.getErrorMessage)
- None
- }
- case _ => None
- }) match {
- case Some(searchResult) =>
- Try(searchResult.getJsonObject ~> [U, I] innerField) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- List.empty
- }
- case _ => List.empty
- }
- }
-
- override def searchWithInnerHits[U, I](jsonQuery: JSONQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])] = {
- val result = apply().execute(jsonQuery.search)
- (if (result.isSucceeded) {
- Some(result)
- } else {
- logger.error(result.getErrorMessage)
- None
- }) match {
- case Some(searchResult) =>
- Try(searchResult.getJsonObject ~> [U, I] innerField) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- List.empty
- }
- case _ => List.empty
- }
- }
-
- override def multiSearch[U](
- sqlQueries: SQLQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]] = {
- val searches: List[Search] = sqlQueries.queries.flatMap(_.search)
- (if (searches.size == sqlQueries.queries.size) {
- Some(apply().execute(new MultiSearch.Builder(searches.asJava).build()))
- } else {
- None
- }) match {
- case Some(multiSearchResult) =>
- multiSearchResult.getResponses.asScala
- .map(searchResponse =>
- searchResponse.searchResult.getSourceAsStringList.asScala
- .map(source => serialization.read[U](source))
- .toList
- )
- .toList
- case _ => List.empty
- }
- }
-
- override def multiSearch[U](
- jsonQueries: JSONQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]] = {
- val searches: List[Search] = jsonQueries.queries.map(_.search)
- val multiSearchResult = apply().execute(new MultiSearch.Builder(searches.asJava).build())
- multiSearchResult.getResponses.asScala
- .map(searchResponse =>
- searchResponse.searchResult.getSourceAsStringList.asScala
- .map(source => serialization.read[U](source))
- .toList
- )
- .toList
- }
-
- override def multiSearchWithInnerHits[U, I](sqlQueries: SQLQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]] = {
- val searches: List[Search] = sqlQueries.queries.flatMap(_.search)
- if (searches.size == sqlQueries.queries.size) {
- nativeMultiSearchWithInnerHits(searches, innerField)
- } else {
- List.empty
- }
- }
-
- override def multiSearchWithInnerHits[U, I](jsonQueries: JSONQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]] = {
- nativeMultiSearchWithInnerHits(jsonQueries.queries.map(_.search), innerField)
- }
-
- private[this] def nativeMultiSearchWithInnerHits[U, I](
- searches: List[Search],
- innerField: String
- )(implicit m1: Manifest[U], m2: Manifest[I], formats: Formats): List[List[(U, List[I])]] = {
- val multiSearchResult = apply().execute(new MultiSearch.Builder(searches.asJava).build())
- if (multiSearchResult.isSucceeded) {
- multiSearchResult.getResponses.asScala
- .map(searchResponse => searchResponse.searchResult.getJsonObject ~> [U, I] innerField)
- .toList
- } else {
- logger.error(multiSearchResult.getErrorMessage)
- List.empty
- }
- }
-
-}
-
-trait JestBulkApi
- extends JestRefreshApi
- with JestUpdateSettingsApi
- with JestIndicesApi
- with BulkApi
- with JestClientCompanion {
- override type A = BulkableAction[DocumentResult]
- override type R = BulkResult
-
- override implicit def toBulkElasticAction(a: A): BulkElasticAction =
- new BulkElasticAction {
- override def index: String = a.getIndex
- }
-
- private[this] def toBulkElasticResultItem(i: BulkResult#BulkResultItem): BulkElasticResultItem =
- new BulkElasticResultItem {
- override def index: String = i.index
- }
-
- override implicit def toBulkElasticResult(r: R): BulkElasticResult =
- new BulkElasticResult {
- override def items: List[BulkElasticResultItem] =
- r.getItems.asScala.toList.map(toBulkElasticResultItem)
- }
-
- override def bulk(implicit
- bulkOptions: BulkOptions,
- system: ActorSystem
- ): Flow[Seq[A], R, NotUsed] = {
- import JestClientResultHandler._
- val parallelism = Math.max(1, bulkOptions.balance)
-
- Flow[Seq[BulkableAction[DocumentResult]]]
- .named("bulk")
- .mapAsyncUnordered[BulkResult](parallelism)(items => {
- logger.info(s"Starting to write batch of ${items.size}...")
- val init =
- new Bulk.Builder().defaultIndex(bulkOptions.index).defaultType(bulkOptions.documentType)
- val bulkQuery = items.foldLeft(init) { (current, query) =>
- current.addAction(query)
- }
- apply().executeAsyncPromise(bulkQuery.build())
- })
- }
-
- override def bulkResult: Flow[R, Set[String], NotUsed] =
- Flow[BulkResult]
- .named("result")
- .map(result => {
- val items = result.getItems
- val indices = items.asScala.map(_.index).toSet
- logger.info(s"Finished to write batch of ${items.size} within ${indices.mkString(",")}.")
- indices
- })
-
- override def toBulkAction(bulkItem: BulkItem): A = {
- val builder = bulkItem.action match {
- case BulkAction.DELETE => new Delete.Builder(bulkItem.body)
- case BulkAction.UPDATE => new Update.Builder(bulkItem.body)
- case _ => new Index.Builder(bulkItem.body)
- }
- bulkItem.id.foreach(builder.id)
- builder.index(bulkItem.index)
- bulkItem.parent.foreach(s => builder.setParameter(Parameters.PARENT, s))
- builder.build()
- }
-
-}
-
-object JestClientApi {
- implicit class SearchSQLQuery(sqlQuery: SQLQuery) {
- def search: Option[Search] = {
- import ElasticQuery._
- select(sqlQuery) match {
- case Some(elasticSelect) =>
- import elasticSelect._
- Console.println(query)
- val search = new Search.Builder(query)
- for (source <- sources) search.addIndex(source)
- Some(search.build())
- case _ => None
- }
- }
- }
-
- implicit class SearchJSONQuery(jsonQuery: JSONQuery) {
- def search: Search = {
- import jsonQuery._
- val _search = new Search.Builder(query)
- for (indice <- indices) _search.addIndex(indice)
- for (t <- types) _search.addType(t)
- _search.build()
- }
- }
-
- implicit class SearchResults(searchResult: SearchResult) {
- def apply[M: Manifest]()(implicit formats: Formats): List[M] = {
- searchResult.getSourceAsStringList.asScala.map(source => serialization.read[M](source)).toList
- }
- }
-
- implicit class JestBulkAction(bulkableAction: BulkableAction[DocumentResult]) {
- def index: String = bulkableAction.getIndex
- }
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientCompanion.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientCompanion.scala
deleted file mode 100644
index b9c78a83..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientCompanion.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-package app.softnetwork.elastic.client.jest
-
-import java.io.IOException
-import java.util
-import java.util.concurrent.TimeUnit
-import app.softnetwork.elastic.client.{ElasticConfig, ElasticCredentials}
-import com.sksamuel.exts.Logging
-import io.searchbox.action.Action
-import io.searchbox.client.{JestClient, JestClientFactory, JestResult, JestResultHandler}
-import io.searchbox.client.config.HttpClientConfig
-import org.apache.http.HttpHost
-
-import scala.collection.JavaConverters._
-import scala.util.{Failure, Success, Try}
-
-import scala.language.reflectiveCalls
-
-/** Created by smanciot on 20/05/2021.
- */
-trait JestClientCompanion extends Logging {
-
- def elasticConfig: ElasticConfig
-
- private[this] var jestClient: Option[InnerJestClient] = None
-
- private[this] val factory = new JestClientFactory()
-
- private[this] var httpClientConfig: HttpClientConfig = _
-
- private[this] class InnerJestClient(private var _jestClient: JestClient) extends JestClient {
- private[this] var nbFailures: Int = 0
-
- override def shutdownClient(): Unit = {
- close()
- }
-
- private def checkClient(): Unit = {
- Option(_jestClient) match {
- case None =>
- factory.setHttpClientConfig(httpClientConfig)
- _jestClient = Try(factory.getObject) match {
- case Success(s) =>
- s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- throw f
- }
- case _ =>
- }
- }
-
- override def executeAsync[J <: JestResult](
- clientRequest: Action[J],
- jestResultHandler: JestResultHandler[_ >: J]
- ): Unit = {
- Try(checkClient())
- Option(_jestClient) match {
- case Some(s) => s.executeAsync[J](clientRequest, jestResultHandler)
- case _ =>
- close()
- jestResultHandler.failed(new Exception("JestClient not initialized"))
- }
- }
-
- override def execute[J <: JestResult](clientRequest: Action[J]): J = {
- Try(checkClient())
- Option(_jestClient) match {
- case Some(j) =>
- Try(j.execute[J](clientRequest)) match {
- case Success(s) =>
- nbFailures = 0
- s
- case Failure(f) =>
- f match {
- case e: IOException =>
- nbFailures += 1
- logger.error(e.getMessage, e)
- close()
- if (nbFailures < 10) {
- Thread.sleep(1000 * nbFailures)
- execute(clientRequest)
- } else {
- throw f
- }
- case e: IllegalStateException =>
- nbFailures += 1
- logger.error(e.getMessage, e)
- close()
- if (nbFailures < 10) {
- Thread.sleep(1000 * nbFailures)
- execute(clientRequest)
- } else {
- throw f
- }
- case _ =>
- close()
- throw f
- }
- }
- case _ =>
- close()
- throw new Exception("JestClient not initialized")
- }
- }
-
- override def setServers(servers: util.Set[String]): Unit = {
- Try(checkClient())
- Option(_jestClient).foreach(_.setServers(servers))
- }
-
- override def close(): Unit = {
- Option(_jestClient).foreach(_.close())
- _jestClient = null
- }
- }
-
- private[this] def getHttpHosts(esUrl: String): Set[HttpHost] = {
- esUrl
- .split(",")
- .map(u => {
- val url = new java.net.URL(u)
- new HttpHost(url.getHost, url.getPort, url.getProtocol)
- })
- .toSet
- }
-
- def apply(): JestClient = {
- apply(
- elasticConfig.credentials,
- multithreaded = elasticConfig.multithreaded,
- discoveryEnabled = elasticConfig.discoveryEnabled
- )
- }
-
- def apply(
- esCredentials: ElasticCredentials,
- multithreaded: Boolean = true,
- timeout: Int = 60000,
- discoveryEnabled: Boolean = false,
- discoveryFrequency: Long = 60L,
- discoveryFrequencyTimeUnit: TimeUnit = TimeUnit.SECONDS
- ): JestClient = {
- jestClient match {
- case Some(s) => s
- case None =>
- httpClientConfig = new HttpClientConfig.Builder(esCredentials.url)
- .defaultCredentials(esCredentials.username, esCredentials.password)
- .preemptiveAuthTargetHosts(getHttpHosts(esCredentials.url).asJava)
- .multiThreaded(multithreaded)
- .discoveryEnabled(discoveryEnabled)
- .discoveryFrequency(discoveryFrequency, discoveryFrequencyTimeUnit)
- .connTimeout(timeout)
- .readTimeout(timeout)
- .build()
- factory.setHttpClientConfig(httpClientConfig)
- jestClient = Some(new InnerJestClient(factory.getObject))
- jestClient.get
- }
- }
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientResultHandler.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientResultHandler.scala
deleted file mode 100644
index fc05e2e2..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestClientResultHandler.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package app.softnetwork.elastic.client.jest
-
-import io.searchbox.action.Action
-import io.searchbox.client.{JestClient, JestResult, JestResultHandler}
-import io.searchbox.core.BulkResult
-
-import scala.concurrent.{Future, Promise}
-
-/** Created by smanciot on 28/04/17.
- */
-private class JestClientResultHandler[T <: JestResult] extends JestResultHandler[T] {
-
- protected val promise: Promise[T] = Promise()
-
- override def completed(result: T): Unit =
- if (!result.isSucceeded)
- promise.failure(new Exception(s"${result.getErrorMessage} - ${result.getJsonString}"))
- else {
- result match {
- case r: BulkResult if !r.getFailedItems.isEmpty =>
- promise.failure(
- new Exception(s"We don't allow any failed item while indexing ${result.getJsonString}")
- )
- case _ => promise.success(result)
-
- }
- }
-
- override def failed(exception: Exception): Unit = promise.failure(exception)
-
- def future: Future[T] = promise.future
-
-}
-
-object JestClientResultHandler {
-
- implicit class PromiseJestClient(jestClient: JestClient) {
- def executeAsyncPromise[T <: JestResult](clientRequest: Action[T]): Future[T] = {
- val resultHandler = new JestClientResultHandler[T]()
- jestClient.executeAsync(clientRequest, resultHandler)
- resultHandler.future
- }
- }
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestProvider.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestProvider.scala
deleted file mode 100644
index 19e12c0a..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/jest/JestProvider.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package app.softnetwork.elastic.client.jest
-
-import app.softnetwork.elastic.persistence.query.ElasticProvider
-import app.softnetwork.persistence.ManifestWrapper
-import app.softnetwork.persistence.model.Timestamped
-
-/** Created by smanciot on 20/05/2021.
- */
-trait JestProvider[T <: Timestamped] extends ElasticProvider[T] with JestClientApi {
- _: ManifestWrapper[T] =>
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/client/package.scala b/elastic/src/main/scala/app/softnetwork/elastic/client/package.scala
deleted file mode 100644
index 9fa533e3..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/client/package.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-package app.softnetwork.elastic
-
-import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
-import akka.stream.stage.{GraphStage, GraphStageLogic}
-import app.softnetwork.elastic.client.BulkAction.BulkAction
-import app.softnetwork.serialization._
-import com.google.gson.{Gson, JsonElement, JsonObject}
-import com.typesafe.config.{Config, ConfigFactory}
-import com.typesafe.scalalogging.StrictLogging
-import configs.Configs
-import org.json4s.Formats
-
-import scala.collection.immutable.Seq
-import scala.collection.mutable
-import scala.language.reflectiveCalls
-import scala.util.{Failure, Success, Try}
-
-/** Created by smanciot on 30/06/2018.
- */
-package object client {
-
- case class ElasticCredentials(
- url: String = "http://localhost:9200",
- username: String = "",
- password: String = ""
- )
-
- case class ElasticConfig(
- credentials: ElasticCredentials = ElasticCredentials(),
- multithreaded: Boolean = true,
- discoveryEnabled: Boolean = false
- )
-
- object ElasticConfig extends StrictLogging {
- def apply(config: Config): ElasticConfig = {
- Configs[ElasticConfig]
- .get(config.withFallback(ConfigFactory.load("softnetwork-elastic.conf")), "elastic")
- .toEither match {
- case Left(configError) =>
- logger.error(s"Something went wrong with the provided arguments $configError")
- throw configError.configException
- case Right(r) => r
- }
- }
- }
-
- object BulkAction extends Enumeration {
- type BulkAction = Value
- val INDEX: client.BulkAction.Value = Value(0, "INDEX")
- val UPDATE: client.BulkAction.Value = Value(1, "UPDATE")
- val DELETE: client.BulkAction.Value = Value(2, "DELETE")
- }
-
- case class BulkItem(
- index: String,
- action: BulkAction,
- body: String,
- id: Option[String],
- parent: Option[String]
- )
-
- case class BulkOptions(
- index: String,
- documentType: String,
- maxBulkSize: Int = 100,
- balance: Int = 1,
- disableRefresh: Boolean = false
- )
-
- trait BulkElasticAction { def index: String }
-
- trait BulkElasticResult { def items: List[BulkElasticResultItem] }
-
- trait BulkElasticResultItem { def index: String }
-
- case class BulkSettings[A](disableRefresh: Boolean = false)(implicit
- updateSettingsApi: UpdateSettingsApi,
- toBulkElasticAction: A => BulkElasticAction
- ) extends GraphStage[FlowShape[A, A]] {
-
- val in: Inlet[A] = Inlet[A]("Filter.in")
- val out: Outlet[A] = Outlet[A]("Filter.out")
-
- val shape: FlowShape[A, A] = FlowShape.of(in, out)
-
- val indices = mutable.Set.empty[String]
-
- override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = {
- new GraphStageLogic(shape) {
- setHandler(
- in,
- () => {
- val elem = grab(in)
- val index = elem.index
- if (!indices.contains(index)) {
- if (disableRefresh) {
- updateSettingsApi.updateSettings(
- index,
- """{"index" : {"refresh_interval" : "-1", "number_of_replicas" : 0} }"""
- )
- }
- indices.add(index)
- }
- push(out, elem)
- }
- )
- setHandler(
- out,
- () => {
- pull(in)
- }
- )
- }
- }
- }
-
- def docAsUpsert(doc: String): String = s"""{"doc":$doc,"doc_as_upsert":true}"""
-
- implicit class InnerHits(searchResult: JsonObject) {
- import scala.collection.JavaConverters._
- def ~>[M, I](
- innerField: String
- )(implicit formats: Formats, m: Manifest[M], i: Manifest[I]): List[(M, List[I])] = {
- def innerHits(result: JsonElement) = {
- result.getAsJsonObject
- .get("inner_hits")
- .getAsJsonObject
- .get(innerField)
- .getAsJsonObject
- .get("hits")
- .getAsJsonObject
- .get("hits")
- .getAsJsonArray
- .iterator()
- }
- val gson = new Gson()
- val results = searchResult.get("hits").getAsJsonObject.get("hits").getAsJsonArray.iterator()
- (for (result <- results.asScala)
- yield (
- result match {
- case obj: JsonObject =>
- Try {
- serialization.read[M](gson.toJson(obj.get("_source")))
- } match {
- case Success(s) => s
- case Failure(f) =>
- throw f
- }
- case _ => serialization.read[M](result.getAsString)
- },
- (for (innerHit <- innerHits(result).asScala) yield innerHit match {
- case obj: JsonObject =>
- Try {
- serialization.read[I](gson.toJson(obj.get("_source")))
- } match {
- case Success(s) => s
- case Failure(f) =>
- throw f
- }
- case _ => serialization.read[I](innerHit.getAsString)
- }).toList
- )).toList
- }
- }
-
- case class JSONQuery(query: String, indices: Seq[String], types: Seq[String] = Seq.empty)
-
- case class JSONQueries(queries: List[JSONQuery])
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/ElasticProvider.scala b/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/ElasticProvider.scala
deleted file mode 100644
index 955e4679..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/ElasticProvider.scala
+++ /dev/null
@@ -1,175 +0,0 @@
-package app.softnetwork.elastic.persistence.query
-
-import app.softnetwork.elastic.client.ElasticClientApi
-import app.softnetwork.elastic.sql.SQLQuery
-import mustache.Mustache
-import org.json4s.Formats
-import app.softnetwork.persistence._
-import app.softnetwork.persistence.model.Timestamped
-import app.softnetwork.persistence.query.ExternalPersistenceProvider
-import app.softnetwork.serialization.commonFormats
-import app.softnetwork.elastic.persistence.typed.Elastic._
-import com.sksamuel.exts.Logging
-
-import scala.reflect.ClassTag
-import scala.util.{Failure, Success, Try}
-
-/** Created by smanciot on 16/05/2020.
- */
-trait ElasticProvider[T <: Timestamped] extends ExternalPersistenceProvider[T] with Logging {
- _: ElasticClientApi with ManifestWrapper[T] =>
-
- implicit def formats: Formats = commonFormats
-
- protected lazy val index: String = getIndex[T](manifestWrapper.wrapped)
-
- protected lazy val _type: String = getType[T](manifestWrapper.wrapped)
-
- protected lazy val alias: String = getAlias[T](manifestWrapper.wrapped)
-
- protected def mappingPath: Option[String] = None
-
- protected def loadMapping(path: Option[String] = None): String = {
- val pathOrElse: String = path.getOrElse(s"""mapping/${_type}.mustache""")
- Try(Mustache(pathOrElse).render(Map("type" -> _type))) match {
- case Success(s) =>
- s
- case Failure(f) =>
- logger.error(s"$pathOrElse -> f.getMessage", f)
- "{}"
- }
- }
-
- protected def initIndex(): Unit = {
- Try {
- createIndex(index)
- addAlias(index, alias)
- setMapping(index, _type, loadMapping(mappingPath))
- } match {
- case Success(_) => logger.info(s"index:$index type:${_type} alias:$alias created")
- case Failure(f) =>
- logger.error(s"!!!!! index:$index type:${_type} alias:$alias -> ${f.getMessage}", f)
- }
- }
-
- // ExternalPersistenceProvider
-
- /** Creates the underlying document to the external system
- *
- * @param document
- * - the document to create
- * @param t
- * - implicit ClassTag for T
- * @return
- * whether the operation is successful or not
- */
- override def createDocument(document: T)(implicit t: ClassTag[T]): Boolean = {
- Try(index(document, Some(index), Some(_type))) match {
- case Success(_) => true
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- /** Updates the underlying document to the external system
- *
- * @param document
- * - the document to update
- * @param upsert
- * - whether or not to create the underlying document if it does not exist in the external
- * system
- * @param t
- * - implicit ClassTag for T
- * @return
- * whether the operation is successful or not
- */
- override def updateDocument(document: T, upsert: Boolean)(implicit t: ClassTag[T]): Boolean = {
- Try(update(document, Some(index), Some(_type), upsert)) match {
- case Success(_) => true
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- /** Deletes the underlying document referenced by its uuid to the external system
- *
- * @param uuid
- * - the uuid of the document to delete
- * @return
- * whether the operation is successful or not
- */
- override def deleteDocument(uuid: String): Boolean = {
- Try(
- delete(uuid, index, _type)
- ) match {
- case Success(value) => value
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- /** Upsert the underlying document referenced by its uuid to the external system
- *
- * @param uuid
- * - the uuid of the document to upsert
- * @param data
- * - a map including all the properties and values tu upsert for the document
- * @return
- * whether the operation is successful or not
- */
- override def upsertDocument(uuid: String, data: String): Boolean = {
- logger.debug(s"Upserting document $uuid with $data")
- Try(
- update(
- index,
- _type,
- uuid,
- data,
- upsert = true
- )
- ) match {
- case Success(_) => true
- case Failure(f) =>
- logger.error(f.getMessage, f)
- false
- }
- }
-
- /** Load the document referenced by its uuid
- *
- * @param uuid
- * - the document uuid
- * @return
- * the document retrieved, None otherwise
- */
- override def loadDocument(uuid: String)(implicit m: Manifest[T], formats: Formats): Option[T] = {
- Try(get(uuid, Some(index), Some(_type))) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- None
- }
- }
-
- /** Search documents
- *
- * @param query
- * - the search query
- * @return
- * the documents founds or an empty list otherwise
- */
- override def searchDocuments(
- query: String
- )(implicit m: Manifest[T], formats: Formats): List[T] = {
- Try(search(SQLQuery(query))) match {
- case Success(s) => s
- case Failure(f) =>
- logger.error(f.getMessage, f)
- List.empty
- }
- }
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStream.scala b/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStream.scala
deleted file mode 100644
index 88af3f3b..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStream.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package app.softnetwork.elastic.persistence.query
-
-import app.softnetwork.persistence.ManifestWrapper
-import app.softnetwork.persistence.query.{
- JournalProvider,
- OffsetProvider,
- State2ExternalProcessorStream
-}
-import app.softnetwork.persistence.model.Timestamped
-import app.softnetwork.persistence.message._
-
-/** Created by smanciot on 16/05/2020.
- */
-trait State2ElasticProcessorStream[T <: Timestamped, E <: CrudEvent]
- extends State2ExternalProcessorStream[T, E]
- with ManifestWrapper[T] { _: JournalProvider with OffsetProvider with ElasticProvider[T] =>
-
- override val externalProcessor = "elastic"
-
- override protected def init(): Unit = {
- initIndex()
- }
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStreamWithJestProvider.scala b/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStreamWithJestProvider.scala
deleted file mode 100644
index 21a628c2..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/persistence/query/State2ElasticProcessorStreamWithJestProvider.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package app.softnetwork.elastic.persistence.query
-
-import app.softnetwork.elastic.client.jest.JestProvider
-import app.softnetwork.persistence.message.CrudEvent
-import app.softnetwork.persistence.model.Timestamped
-import app.softnetwork.persistence.query.{JournalProvider, OffsetProvider}
-
-trait State2ElasticProcessorStreamWithJestProvider[T <: Timestamped, E <: CrudEvent]
- extends State2ElasticProcessorStream[T, E]
- with JestProvider[T] { _: JournalProvider with OffsetProvider => }
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/persistence/typed/Elastic.scala b/elastic/src/main/scala/app/softnetwork/elastic/persistence/typed/Elastic.scala
deleted file mode 100644
index 1efe4d59..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/persistence/typed/Elastic.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package app.softnetwork.elastic.persistence.typed
-
-import app.softnetwork.persistence._
-
-import app.softnetwork.persistence.model.Timestamped
-
-import scala.language.implicitConversions
-
-import app.softnetwork.persistence._
-
-/** Created by smanciot on 10/04/2020.
- */
-object Elastic {
-
- def index(_type: String): String = {
- s"${_type}s-$environment".toLowerCase
- }
-
- def alias(_type: String): String = {
- s"${_type}s-$environment-v$version".toLowerCase
- }
-
- def getAlias[T <: Timestamped](implicit m: Manifest[T]): String = {
- alias(getType[T])
- }
-
- def getIndex[T <: Timestamped](implicit m: Manifest[T]): String = {
- index(getType[T])
- }
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticFilters.scala b/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticFilters.scala
deleted file mode 100644
index cf19fa74..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticFilters.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import com.sksamuel.elastic4s.ElasticApi._
-import com.sksamuel.elastic4s.searches.ScoreMode
-import com.sksamuel.elastic4s.searches.queries.Query
-import com.sksamuel.elastic4s.searches.queries.term.{BuildableTermsQuery, TermsQuery}
-
-import scala.annotation.tailrec
-
-/** Created by smanciot on 27/06/2018.
- */
-object ElasticFilters {
-
- import SQLImplicits._
-
- implicit def BuildableTermsNoOp[T]: BuildableTermsQuery[T] = new BuildableTermsQuery[T] {
- override def build(q: TermsQuery[T]): Any = null // not used by the http builders
- }
-
- def filter(query: String): Query = {
- val criteria: Option[SQLCriteria] = query
- filter(criteria)
- }
-
- def filter(criteria: Option[SQLCriteria]): Query = {
-
- var _innerHits: Set[String] = Set.empty
-
- @tailrec
- def _innerHit(name: String, inc: Int = 1): String = {
- if (_innerHits.contains(name)) {
- val incName = s"$name$inc"
- if (_innerHits.contains(incName)) {
- _innerHit(name, inc + 1)
- } else {
- _innerHits += incName
- incName
- }
- } else {
- _innerHits += name
- name
- }
- }
-
- def _filter(criteria: SQLCriteria): Query = {
- criteria match {
- case ElasticGeoDistance(identifier, distance, lat, lon) =>
- geoDistanceQuery(identifier.identifier)
- .point(lat.value, lon.value) distance distance.value
- case SQLExpression(identifier, operator, value) =>
- value match {
- case n: SQLNumeric[Any] @unchecked =>
- operator match {
- case _: GE.type => rangeQuery(identifier.identifier) gte n.sql
- case _: GT.type => rangeQuery(identifier.identifier) gt n.sql
- case _: LE.type => rangeQuery(identifier.identifier) lte n.sql
- case _: LT.type => rangeQuery(identifier.identifier) lt n.sql
- case _: EQ.type => termQuery(identifier.identifier, n.sql)
- case _: NE.type => not(termQuery(identifier.identifier, n.sql))
- case _ => matchAllQuery
- }
- case l: SQLLiteral =>
- operator match {
- case _: LIKE.type => regexQuery(identifier.identifier, toRegex(l.value))
- case _: GE.type => rangeQuery(identifier.identifier) gte l.value
- case _: GT.type => rangeQuery(identifier.identifier) gt l.value
- case _: LE.type => rangeQuery(identifier.identifier) lte l.value
- case _: LT.type => rangeQuery(identifier.identifier) lt l.value
- case _: EQ.type => termQuery(identifier.identifier, l.value)
- case _: NE.type => not(termQuery(identifier.identifier, l.value))
- case _ => matchAllQuery
- }
- case b: SQLBoolean =>
- operator match {
- case _: EQ.type => termQuery(identifier.identifier, b.value)
- case _: NE.type => not(termQuery(identifier.identifier, b.value))
- case _ => matchAllQuery
- }
- case _ => matchAllQuery
- }
- case SQLIsNull(identifier) => not(existsQuery(identifier.identifier))
- case SQLIsNotNull(identifier) => existsQuery(identifier.identifier)
- case SQLPredicate(left, operator, right, _not) =>
- operator match {
- case _: AND.type =>
- if (_not.isDefined)
- bool(Seq(_filter(left)), Seq.empty, Seq(_filter(right)))
- else
- boolQuery().filter(_filter(left), _filter(right))
- case _: OR.type => should(_filter(left), _filter(right))
- case _ => matchAllQuery
- }
- case SQLIn(identifier, values, n) =>
- val _values: Seq[Any] = values.innerValues
- val t =
- _values.headOption match {
- case Some(_: Double) =>
- termsQuery(identifier.identifier, _values.asInstanceOf[Seq[Double]])
- case Some(_: Integer) =>
- termsQuery(identifier.identifier, _values.asInstanceOf[Seq[Integer]])
- case Some(_: Long) =>
- termsQuery(identifier.identifier, _values.asInstanceOf[Seq[Long]])
- case _ => termsQuery(identifier.identifier, _values.map(_.toString))
- }
- n match {
- case Some(_) => not(t)
- case None => t
- }
- case SQLBetween(identifier, from, to) =>
- rangeQuery(identifier.identifier) gte from.value lte to.value
- case relation: ElasticRelation =>
- import scala.language.reflectiveCalls
- val t = relation.`type`
- t match {
- case Some(_) =>
- relation match {
- case _: ElasticNested =>
- nestedQuery(t.get, _filter(relation.criteria)).inner(innerHits(_innerHit(t.get)))
- case _: ElasticChild =>
- hasChildQuery(t.get, _filter(relation.criteria), ScoreMode.None)
- case _: ElasticParent =>
- hasParentQuery(t.get, _filter(relation.criteria), score = false)
- case _ => matchAllQuery
- }
- case _ => matchAllQuery
- }
- case _ => matchAllQuery
- }
- }
-
- criteria match {
- case Some(c) => _filter(c)
- case _ => matchAllQuery
- }
-
- }
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticQuery.scala b/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticQuery.scala
deleted file mode 100644
index 84b8c404..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/sql/ElasticQuery.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import com.sksamuel.elastic4s.ElasticApi._
-import com.sksamuel.elastic4s.http.search.SearchBodyBuilderFn
-import com.sksamuel.elastic4s.searches.queries.{BoolQuery, Query}
-
-/** Created by smanciot on 27/06/2018.
- */
-object ElasticQuery {
-
- import ElasticFilters._
- import SQLImplicits._
-
- def select(sqlQuery: SQLQuery): Option[ElasticSelect] = select(sqlQuery.query)
-
- private[this] def select(query: String): Option[ElasticSelect] = {
- val select: Option[SQLSelectQuery] = query
- select match {
-
- case Some(s) =>
- val criteria = s.where match {
- case Some(w) => w.criteria
- case _ => None
- }
-
- val fields = s.select.fields.map(_.identifier.identifier)
-
- val sources = s.from.tables.map((table: SQLTable) => table.source.sql)
-
- val queryFiltered = filter(criteria) match {
- case b: BoolQuery => b
- case q: Query => boolQuery().filter(q)
- }
-
- var _search = search("") query {
- queryFiltered
- } sourceInclude fields
-
- _search = s.limit match {
- case Some(l) => _search limit l.limit from 0
- case _ => _search
- }
-
- val q = SearchBodyBuilderFn(_search).string()
-
- Some(ElasticSelect(s.select.fields, sources, q.replace("\"version\":true,", "") /*FIXME*/ ))
-
- case _ => None
- }
- }
-
- def count(sqlQuery: SQLQuery): Seq[ElasticCount] = {
- val select: Option[SQLSelectQuery] = sqlQuery.query
- count(select)
- }
-
- private[this] def count(select: Option[SQLSelectQuery]): Seq[ElasticCount] = {
- select match {
- case Some(s: SQLCountQuery) =>
- val criteria = s.where match {
- case Some(w) => w.criteria
- case _ => None
- }
- val sources = s.from.tables.map((table: SQLTable) => table.source.sql)
- s.selectCount.countFields.map((countField: SQLCountField) => {
- val sourceField = countField.identifier.identifier
-
- val field = countField.alias match {
- case Some(alias) => alias.alias
- case _ => sourceField
- }
-
- val distinct = countField.identifier.distinct.isDefined
-
- val filtered = countField.filter
-
- val isFiltered = filtered.isDefined
-
- val nested = sourceField.contains(".")
-
- val agg =
- if (distinct)
- s"agg_distinct_${sourceField.replace(".", "_")}"
- else
- s"agg_${sourceField.replace(".", "_")}"
-
- var aggPath = Seq[String]()
-
- val queryFiltered = filter(criteria) match {
- case b: BoolQuery => b
- case q: Query => boolQuery().filter(q)
- }
-
- val q =
- if (sourceField.equalsIgnoreCase("_id")) { // "native" elastic count
- SearchBodyBuilderFn(
- search("") query {
- queryFiltered
- }
- ).string()
- } else {
- val _agg =
- if (distinct)
- cardinalityAgg(agg, sourceField)
- else
- valueCountAgg(agg, sourceField)
-
- def _filtered = {
- if (isFiltered) {
- val filteredAgg = s"filtered_agg"
- aggPath ++= Seq(filteredAgg)
- filterAgg(filteredAgg, filter(filtered.get.criteria)) subaggs {
- aggPath ++= Seq(agg)
- _agg
- }
- } else {
- aggPath ++= Seq(agg)
- _agg
- }
- }
-
- SearchBodyBuilderFn(
- search("") query {
- queryFiltered
- }
- aggregations {
- if (nested) {
- val path = sourceField.split("\\.").head
- val nestedAgg = s"nested_$path"
- aggPath ++= Seq(nestedAgg)
- nestedAggregation(nestedAgg, path) subaggs {
- _filtered
- }
- } else {
- _filtered
- }
- }
- size 0
- ).string()
- }
-
- ElasticCount(
- aggPath.mkString("."),
- field,
- sourceField,
- sources,
- q.replace("\"version\":true,", ""), /*FIXME*/
- distinct,
- nested,
- isFiltered
- )
- })
- case _ => Seq.empty
- }
- }
-
-}
-
-case class ElasticCount(
- agg: String,
- field: String,
- sourceField: String,
- sources: Seq[String],
- query: String,
- distinct: Boolean = false,
- nested: Boolean = false,
- filtered: Boolean = false
-)
-
-case class ElasticSelect(
- fields: Seq[SQLField],
- sources: Seq[String],
- query: String
-)
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLImplicits.scala b/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLImplicits.scala
deleted file mode 100644
index 5888c0cd..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLImplicits.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import scala.util.matching.Regex
-
-/** Created by smanciot on 27/06/2018.
- */
-object SQLImplicits {
- import scala.language.implicitConversions
-
- implicit def queryToSQLCriteria(query: String): Option[SQLCriteria] = {
- val sql: Option[SQLSelectQuery] = query
- sql match {
- case Some(q) =>
- q.where match {
- case Some(w) => w.criteria
- case _ => None
- }
- case _ => None
- }
- }
- implicit def queryToSQLQuery(query: String): Option[SQLSelectQuery] = {
- SQLParser(query) match {
- case Left(_) => None
- case Right(r) => Some(r)
- }
- }
-
- implicit def sqllikeToRegex(value: String): Regex = toRegex(value).r
-
-}
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLParser.scala b/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLParser.scala
deleted file mode 100644
index 73716a81..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/sql/SQLParser.scala
+++ /dev/null
@@ -1,303 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import scala.util.parsing.combinator.RegexParsers
-
-/** Created by smanciot on 27/06/2018.
- */
-object SQLParser extends RegexParsers {
-
- val regexAlias = """\$?[a-zA-Z0-9_]*"""
-
- val regexRef = """\$[a-zA-Z0-9_]*"""
-
- def identifier: Parser[SQLIdentifier] =
- "(?i)distinct".r.? ~ (regexRef.r ~ ".").? ~ """[\*a-zA-Z_\-][a-zA-Z0-9_\-\.\[\]]*""".r ^^ {
- case d ~ a ~ str =>
- SQLIdentifier(
- str,
- a match {
- case Some(x) => Some(x._1)
- case _ => None
- },
- d
- )
- }
-
- def literal: Parser[SQLLiteral] =
- """"[^"]*"""".r ^^ (str => SQLLiteral(str.substring(1, str.length - 1)))
-
- def int: Parser[SQLInt] = """(-)?(0|[1-9]\d*)""".r ^^ (str => SQLInt(str.toInt))
-
- def double: Parser[SQLDouble] = """(-)?(\d+\.\d+)""".r ^^ (str => SQLDouble(str.toDouble))
-
- def boolean: Parser[SQLBoolean] = """(true|false)""".r ^^ (bool => SQLBoolean(bool.toBoolean))
-
- def eq: Parser[SQLExpressionOperator] = "=" ^^ (_ => EQ)
- def ge: Parser[SQLExpressionOperator] = ">=" ^^ (_ => GE)
- def gt: Parser[SQLExpressionOperator] = ">" ^^ (_ => GT)
- def in: Parser[SQLExpressionOperator] = "(?i)in".r ^^ (_ => IN)
- def le: Parser[SQLExpressionOperator] = "<=" ^^ (_ => LE)
- def like: Parser[SQLExpressionOperator] = "(?i)like".r ^^ (_ => LIKE)
- def lt: Parser[SQLExpressionOperator] = "<" ^^ (_ => LT)
- def ne: Parser[SQLExpressionOperator] = "<>" ^^ (_ => NE)
-
- def isNull: Parser[SQLExpressionOperator] = "(?i)(is null)".r ^^ (_ => IS_NULL)
- def isNullExpression: Parser[SQLCriteria] = identifier ~ isNull ^^ { case i ~ _ => SQLIsNull(i) }
-
- def isNotNull: Parser[SQLExpressionOperator] = "(?i)(is not null)".r ^^ (_ => IS_NOT_NULL)
- def isNotNullExpression: Parser[SQLCriteria] = identifier ~ isNotNull ^^ { case i ~ _ =>
- SQLIsNotNull(i)
- }
-
- def equalityExpression: Parser[SQLExpression] =
- identifier ~ (eq | ne) ~ (boolean | literal | double | int) ^^ { case i ~ o ~ v =>
- SQLExpression(i, o, v)
- }
- def likeExpression: Parser[SQLExpression] = identifier ~ like ~ literal ^^ { case i ~ o ~ v =>
- SQLExpression(i, o, v)
- }
- def comparisonExpression: Parser[SQLExpression] =
- identifier ~ (ge | gt | le | lt) ~ (double | int | literal) ^^ { case i ~ o ~ v =>
- SQLExpression(i, o, v)
- }
-
- def inLiteralExpression: Parser[SQLCriteria] =
- identifier ~ not.? ~ in ~ start ~ rep1(literal ~ separator.?) ~ end ^^ {
- case i ~ n ~ _ ~ _ ~ v ~ _ => SQLIn(i, SQLLiteralValues(v map { _._1 }), n)
- }
- def inNumericalExpression: Parser[SQLCriteria] =
- identifier ~ not.? ~ in ~ start ~ rep1((double | int) ~ separator.?) ~ end ^^ {
- case i ~ n ~ _ ~ _ ~ v ~ _ => SQLIn(i, SQLNumericValues(v map { _._1 }), n)
- }
-
- def between: Parser[SQLExpressionOperator] = "(?i)between".r ^^ (_ => BETWEEN)
- def betweenExpression: Parser[SQLCriteria] = identifier ~ between ~ literal ~ and ~ literal ^^ {
- case i ~ _ ~ from ~ _ ~ to => SQLBetween(i, from, to)
- }
-
- def distance: Parser[SQLFunction] = "(?i)distance".r ^^ (_ => SQLDistance)
- def distanceExpression: Parser[SQLCriteria] =
- distance ~ start ~ identifier ~ separator ~ start ~ double ~ separator ~ double ~ end ~ end ~ le ~ literal ^^ {
- case _ ~ _ ~ i ~ _ ~ _ ~ lat ~ _ ~ lon ~ _ ~ _ ~ _ ~ d => ElasticGeoDistance(i, d, lat, lon)
- }
-
- def start: Parser[SQLDelimiter] = "(" ^^ (_ => StartPredicate)
- def end: Parser[SQLDelimiter] = ")" ^^ (_ => EndPredicate)
- def separator: Parser[SQLDelimiter] = "," ^^ (_ => Separator)
-
- def and: Parser[SQLPredicateOperator] = "(?i)and".r ^^ (_ => AND)
- def or: Parser[SQLPredicateOperator] = "(?i)or".r ^^ (_ => OR)
- def not: Parser[NOT.type] = "(?i)not".r ^^ (_ => NOT)
-
- def nested: Parser[ElasticOperator] = "(?i)nested".r ^^ (_ => NESTED)
- def child: Parser[ElasticOperator] = "(?i)child".r ^^ (_ => CHILD)
- def parent: Parser[ElasticOperator] = "(?i)parent".r ^^ (_ => PARENT)
-
- def criteria: Parser[SQLCriteria] =
- start.? ~ (equalityExpression | likeExpression | comparisonExpression | inLiteralExpression | inNumericalExpression | betweenExpression | isNotNullExpression | isNullExpression | distanceExpression) ~ end.? ^^ {
- case _ ~ c ~ _ =>
- c match {
- case x: SQLExpression if x.columnName.nested => ElasticNested(x)
- case y: SQLIn[_, _] if y.columnName.nested => ElasticNested(y)
- case z: SQLBetween if z.columnName.nested => ElasticNested(z)
- case n: SQLIsNull if n.columnName.nested => ElasticNested(n)
- case nn: SQLIsNotNull if nn.columnName.nested => ElasticNested(nn)
- case _ => c
- }
- }
-
- @scala.annotation.tailrec
- private def unwrappNested(nested: ElasticNested): SQLCriteria = {
- val c = nested.criteria
- c match {
- case x: ElasticNested => unwrappNested(x)
- case _ => c
- }
- }
-
- private def unwrappCriteria(criteria: SQLCriteria): SQLCriteria = {
- criteria match {
- case x: ElasticNested => unwrappNested(x)
- case _ => criteria
- }
- }
-
- private def unwrappPredicate(predicate: SQLPredicate): SQLPredicate = {
- var unwrapp = false
- val _left = predicate.leftCriteria match {
- case x: ElasticNested =>
- unwrapp = true
- unwrappNested(x)
- case l => l
- }
- val _right = predicate.rightCriteria match {
- case x: ElasticNested =>
- unwrapp = true
- unwrappNested(x)
- case r => r
- }
- if (unwrapp)
- SQLPredicate(_left, predicate.operator, _right)
- else
- predicate
- }
-
- def predicate: Parser[SQLPredicate] = criteria ~ (and | or) ~ not.? ~ criteria ^^ {
- case l ~ o ~ n ~ r => SQLPredicate(l, o, r, n)
- }
-
- def nestedCriteria: Parser[ElasticRelation] = nested ~ start.? ~ criteria ~ end.? ^^ {
- case _ ~ _ ~ c ~ _ => ElasticNested(unwrappCriteria(c))
- }
- def nestedPredicate: Parser[ElasticRelation] = nested ~ start ~ predicate ~ end ^^ {
- case _ ~ _ ~ p ~ _ => ElasticNested(unwrappPredicate(p))
- }
-
- def childCriteria: Parser[ElasticRelation] = child ~ start.? ~ criteria ~ end.? ^^ {
- case _ ~ _ ~ c ~ _ => ElasticChild(unwrappCriteria(c))
- }
- def childPredicate: Parser[ElasticRelation] = child ~ start ~ predicate ~ end ^^ {
- case _ ~ _ ~ p ~ _ => ElasticChild(unwrappPredicate(p))
- }
-
- def parentCriteria: Parser[ElasticRelation] = parent ~ start.? ~ criteria ~ end.? ^^ {
- case _ ~ _ ~ c ~ _ => ElasticParent(unwrappCriteria(c))
- }
- def parentPredicate: Parser[ElasticRelation] = parent ~ start ~ predicate ~ end ^^ {
- case _ ~ _ ~ p ~ _ => ElasticParent(unwrappPredicate(p))
- }
-
- def alias: Parser[SQLAlias] = "(?i)as".r ~ regexAlias.r ^^ { case _ ~ b => SQLAlias(b) }
-
- def count: Parser[SQLFunction] = "(?i)count".r ^^ (_ => SQLCount)
- def min: Parser[SQLFunction] = "(?i)min".r ^^ (_ => SQLMin)
- def max: Parser[SQLFunction] = "(?i)max".r ^^ (_ => SQLMax)
- def avg: Parser[SQLFunction] = "(?i)avg".r ^^ (_ => SQLAvg)
- def sum: Parser[SQLFunction] = "(?i)sum".r ^^ (_ => SQLSum)
-
- def _select: Parser[SELECT.type] = "(?i)select".r ^^ (_ => SELECT)
-
- def _filter: Parser[FILTER.type] = "(?i)filter".r ^^ (_ => FILTER)
-
- def _from: Parser[FROM.type] = "(?i)from".r ^^ (_ => FROM)
-
- def _where: Parser[WHERE.type] = "(?i)where".r ^^ (_ => WHERE)
-
- def _limit: Parser[LIMIT.type] = "(?i)limit".r ^^ (_ => LIMIT)
-
- def countFilter: Parser[SQLFilter] = _filter ~> "[" ~> whereCriteria <~ "]" ^^ { case rawTokens =>
- SQLFilter(
- processTokens(rawTokens, None, None, None) match {
- case Some(c) => Some(unwrappCriteria(c))
- case _ => None
- }
- )
- }
-
- def countField: Parser[SQLCountField] =
- count ~ start ~ identifier ~ end ~ alias.? ~ countFilter.? ^^ { case _ ~ _ ~ i ~ _ ~ a ~ f =>
- new SQLCountField(i, a, f)
- }
-
- def field: Parser[SQLField] =
- (min | max | avg | sum).? ~ start.? ~ identifier ~ end.? ~ alias.? ^^ {
- case f ~ _ ~ i ~ _ ~ a => SQLField(f, i, a)
- }
-
- def selectCount: Parser[SQLSelect] = _select ~ rep1sep(countField, separator) ^^ {
- case _ ~ fields => new SQLSelectCount(fields)
- }
-
- def select: Parser[SQLSelect] = _select ~ rep1sep(field, separator) ^^ { case _ ~ fields =>
- SQLSelect(fields)
- }
-
- def table: Parser[SQLTable] = identifier ~ alias.? ^^ { case i ~ a => SQLTable(i, a) }
-
- def from: Parser[SQLFrom] = _from ~ rep1sep(table, separator) ^^ { case _ ~ tables =>
- SQLFrom(tables)
- }
-
- def allPredicate: SQLParser.Parser[SQLCriteria] =
- nestedPredicate | childPredicate | parentPredicate | predicate
-
- def allCriteria: SQLParser.Parser[SQLCriteria] =
- nestedCriteria | childCriteria | parentCriteria | criteria
-
- def whereCriteria: SQLParser.Parser[List[SQLToken]] = rep1(
- allPredicate | allCriteria | start | or | and | end
- )
-
- def where: Parser[SQLWhere] = _where ~ whereCriteria ^^ { case _ ~ rawTokens =>
- SQLWhere(processTokens(rawTokens, None, None, None))
- }
-
- def limit: SQLParser.Parser[SQLLimit] = _limit ~ int ^^ { case _ ~ i => SQLLimit(i.value) }
-
- def tokens: Parser[_ <: SQLSelectQuery] = {
- phrase((selectCount | select) ~ from ~ where.? ~ limit.?) ^^ { case s ~ f ~ w ~ l =>
- s match {
- case x: SQLSelectCount => new SQLCountQuery(x, f, w, l)
- case _ => SQLSelectQuery(s, f, w, l)
- }
- }
- }
-
- def apply(query: String): Either[SQLParserError, SQLSelectQuery] = {
- parse(tokens, query) match {
- case NoSuccess(msg, _) =>
- println(msg)
- Left(SQLParserError(msg))
- case Success(result, _) => Right(result)
- }
- }
-
- @scala.annotation.tailrec
- private def processTokens(
- tokens: List[SQLToken],
- left: Option[SQLCriteria],
- operator: Option[SQLPredicateOperator],
- right: Option[SQLCriteria]
- ): Option[SQLCriteria] = {
- tokens.headOption match {
- case Some(c: SQLCriteria) if left.isEmpty =>
- processTokens(tokens.tail, Some(c), operator, right)
-
- case Some(c: SQLCriteria) if left.isDefined && operator.isDefined && right.isEmpty =>
- processTokens(tokens.tail, left, operator, Some(c))
-
- case Some(_: StartDelimiter) => processTokens(tokens.tail, left, operator, right)
-
- case Some(_: EndDelimiter) if left.isDefined && operator.isDefined && right.isDefined =>
- processTokens(
- tokens.tail,
- Some(SQLPredicate(left.get, operator.get, right.get)),
- None,
- None
- )
-
- case Some(_: EndDelimiter) => processTokens(tokens.tail, left, operator, right)
-
- case Some(o: SQLPredicateOperator) if operator.isEmpty =>
- processTokens(tokens.tail, left, Some(o), right)
-
- case Some(o: SQLPredicateOperator)
- if left.isDefined && operator.isDefined && right.isDefined =>
- processTokens(
- tokens.tail,
- Some(SQLPredicate(left.get, operator.get, right.get)),
- Some(o),
- None
- )
-
- case None if left.isDefined && operator.isDefined && right.isDefined =>
- Some(SQLPredicate(left.get, operator.get, right.get))
-
- case None => left
-
- }
- }
-}
-
-trait SQLCompilationError
-case class SQLParserError(msg: String) extends SQLCompilationError
diff --git a/elastic/src/main/scala/app/softnetwork/elastic/sql/package.scala b/elastic/src/main/scala/app/softnetwork/elastic/sql/package.scala
deleted file mode 100644
index 20fbbf83..00000000
--- a/elastic/src/main/scala/app/softnetwork/elastic/sql/package.scala
+++ /dev/null
@@ -1,406 +0,0 @@
-package app.softnetwork.elastic
-
-import java.util.regex.Pattern
-
-import scala.reflect.runtime.universe._
-
-import scala.util.Try
-
-/** Created by smanciot on 27/06/2018.
- */
-package object sql {
-
- import scala.language.implicitConversions
-
- implicit def asString(token: Option[_ <: SQLToken]): String = token match {
- case Some(t) => t.sql
- case _ => ""
- }
-
- sealed trait SQLToken extends Serializable {
- def sql: String
- override def toString: String = sql
- }
-
- abstract class SQLExpr(override val sql: String) extends SQLToken
-
- case object SELECT extends SQLExpr("select")
- case object FILTER extends SQLExpr("filter")
- case object FROM extends SQLExpr("from")
- case object WHERE extends SQLExpr("where")
- case object LIMIT extends SQLExpr("limit")
-
- case class SQLLimit(limit: Int) extends SQLExpr(s"limit $limit")
-
- case class SQLIdentifier(
- identifier: String,
- alias: Option[String] = None,
- distinct: Option[String] = None
- ) extends SQLExpr(
- if (alias.isDefined)
- s"${distinct.getOrElse("")} ${alias.get}.$identifier".trim
- else
- s"${distinct.getOrElse("")} $identifier".trim
- )
- with SQLSource {
- lazy val nested: Boolean = identifier.contains('.') && !identifier.endsWith(".raw")
- }
-
- abstract class SQLValue[+T](val value: T)(implicit ev$1: T => Ordered[T]) extends SQLToken {
- def choose[R >: T](
- values: Seq[R],
- operator: Option[SQLExpressionOperator],
- separator: String = "|"
- )(implicit ev: R => Ordered[R]): Option[R] = {
- if (values.isEmpty)
- None
- else
- operator match {
- case Some(_: EQ.type) => values.find(_ == value)
- case Some(_: NE.type) => values.find(_ != value)
- case Some(_: GE.type) => values.filter(_ >= value).sorted.reverse.headOption
- case Some(_: GT.type) => values.filter(_ > value).sorted.reverse.headOption
- case Some(_: LE.type) => values.filter(_ <= value).sorted.headOption
- case Some(_: LT.type) => values.filter(_ < value).sorted.headOption
- case _ => values.headOption
- }
- }
- }
-
- case class SQLBoolean(value: Boolean) extends SQLToken {
- override def sql: String = s"$value"
- }
-
- case class SQLLiteral(override val value: String) extends SQLValue[String](value) {
- override def sql: String = s""""$value""""
- import SQLImplicits._
- private lazy val pattern: Pattern = value.pattern
- def like: Seq[String] => Boolean = {
- _.exists { pattern.matcher(_).matches() }
- }
- def eq: Seq[String] => Boolean = {
- _.exists { _.contentEquals(value) }
- }
- def ne: Seq[String] => Boolean = {
- _.forall { !_.contentEquals(value) }
- }
- override def choose[R >: String](
- values: Seq[R],
- operator: Option[SQLExpressionOperator],
- separator: String = "|"
- )(implicit ev: R => Ordered[R]): Option[R] = {
- operator match {
- case Some(_: EQ.type) => values.find(v => v.toString contentEquals value)
- case Some(_: NE.type) => values.find(v => !(v.toString contentEquals value))
- case Some(_: LIKE.type) => values.find(v => pattern.matcher(v.toString).matches())
- case None => Some(values.mkString(separator))
- case _ => super.choose(values, operator, separator)
- }
- }
- }
-
- abstract class SQLNumeric[+T](override val value: T)(implicit ev$1: T => Ordered[T])
- extends SQLValue[T](value) {
- override def sql: String = s"$value"
- override def choose[R >: T](
- values: Seq[R],
- operator: Option[SQLExpressionOperator],
- separator: String = "|"
- )(implicit ev: R => Ordered[R]): Option[R] = {
- operator match {
- case None => if (values.isEmpty) None else Some(values.max)
- case _ => super.choose(values, operator, separator)
- }
- }
- }
-
- case class SQLInt(override val value: Int) extends SQLNumeric[Int](value) {
- def max: Seq[Int] => Int = x => Try(x.max).getOrElse(0)
- def min: Seq[Int] => Int = x => Try(x.min).getOrElse(0)
- def eq: Seq[Int] => Boolean = {
- _.exists { _ == value }
- }
- def ne: Seq[Int] => Boolean = {
- _.forall { _ != value }
- }
- }
-
- case class SQLDouble(override val value: Double) extends SQLNumeric[Double](value) {
- def max: Seq[Double] => Double = x => Try(x.max).getOrElse(0)
- def min: Seq[Double] => Double = x => Try(x.min).getOrElse(0)
- def eq: Seq[Double] => Boolean = {
- _.exists { _ == value }
- }
- def ne: Seq[Double] => Boolean = {
- _.forall { _ != value }
- }
- }
-
- sealed abstract class SQLValues[+R: TypeTag, +T <: SQLValue[R]](val values: Seq[T])
- extends SQLToken {
- override def sql = s"(${values.map(_.sql).mkString(",")})"
- lazy val innerValues: Seq[R] = values.map(_.value)
- }
-
- case class SQLLiteralValues(override val values: Seq[SQLLiteral])
- extends SQLValues[String, SQLValue[String]](values) {
- def eq: Seq[String] => Boolean = {
- _.exists { s => innerValues.exists(_.contentEquals(s)) }
- }
- def ne: Seq[String] => Boolean = {
- _.forall { s => innerValues.forall(!_.contentEquals(s)) }
- }
- }
-
- case class SQLNumericValues[R: TypeTag](override val values: Seq[SQLNumeric[R]])
- extends SQLValues[R, SQLNumeric[R]](values) {
- def eq: Seq[R] => Boolean = {
- _.exists { n => innerValues.contains(n) }
- }
- def ne: Seq[R] => Boolean = {
- _.forall { n => !innerValues.contains(n) }
- }
- }
-
- sealed trait SQLOperator extends SQLToken
-
- sealed trait SQLExpressionOperator extends SQLOperator
-
- case object EQ extends SQLExpr("=") with SQLExpressionOperator
- case object GE extends SQLExpr(">=") with SQLExpressionOperator
- case object GT extends SQLExpr(">") with SQLExpressionOperator
- case object IN extends SQLExpr("in") with SQLExpressionOperator
- case object LE extends SQLExpr("<=") with SQLExpressionOperator
- case object LIKE extends SQLExpr("like") with SQLExpressionOperator
- case object LT extends SQLExpr("<") with SQLExpressionOperator
- case object NE extends SQLExpr("<>") with SQLExpressionOperator
- case object BETWEEN extends SQLExpr("between") with SQLExpressionOperator
- case object IS_NULL extends SQLExpr("is null") with SQLExpressionOperator
- case object IS_NOT_NULL extends SQLExpr("is not null") with SQLExpressionOperator
-
- sealed trait SQLPredicateOperator extends SQLOperator
-
- case object AND extends SQLPredicateOperator { override val sql: String = "and" }
- case object OR extends SQLPredicateOperator { override val sql: String = "or" }
- case object NOT extends SQLPredicateOperator { override val sql: String = "not" }
-
- sealed trait SQLCriteria extends SQLToken {
- def operator: SQLOperator
- }
-
- case class SQLExpression(
- columnName: SQLIdentifier,
- operator: SQLExpressionOperator,
- value: SQLToken
- ) extends SQLCriteria {
- override def sql = s"$columnName ${operator.sql} $value"
- }
-
- case class SQLIsNull(columnName: SQLIdentifier) extends SQLCriteria {
- override val operator: SQLOperator = IS_NULL
- override def sql = s"$columnName ${operator.sql}"
- }
-
- case class SQLIsNotNull(columnName: SQLIdentifier) extends SQLCriteria {
- override val operator: SQLOperator = IS_NOT_NULL
- override def sql = s"$columnName ${operator.sql}"
- }
-
- case class SQLIn[R, +T <: SQLValue[R]](
- columnName: SQLIdentifier,
- values: SQLValues[R, T],
- not: Option[NOT.type] = None
- ) extends SQLCriteria {
- override def sql =
- s"$columnName ${not.map(_ => "not ").getOrElse("")}${operator.sql} ${values.sql}"
- override def operator: SQLOperator = IN
- }
-
- case class SQLBetween(columnName: SQLIdentifier, from: SQLLiteral, to: SQLLiteral)
- extends SQLCriteria {
- override def sql = s"$columnName ${operator.sql} ${from.sql} and ${to.sql}"
- override def operator: SQLOperator = BETWEEN
- }
-
- case class ElasticGeoDistance(
- columnName: SQLIdentifier,
- distance: SQLLiteral,
- lat: SQLDouble,
- lon: SQLDouble
- ) extends SQLCriteria {
- override def sql = s"${operator.sql}($columnName,(${lat.sql},${lon.sql})) <= ${distance.sql}"
- override def operator: SQLOperator = SQLDistance
- }
-
- case class SQLPredicate(
- leftCriteria: SQLCriteria,
- operator: SQLPredicateOperator,
- rightCriteria: SQLCriteria,
- not: Option[NOT.type] = None
- ) extends SQLCriteria {
- val leftParentheses: Boolean = leftCriteria match {
- case _: ElasticRelation => false
- case _ => true
- }
- val rightParentheses: Boolean = rightCriteria match {
- case _: ElasticRelation => false
- case _ => true
- }
- override def sql = s"${if (leftParentheses) s"(${leftCriteria.sql})"
- else leftCriteria.sql} ${operator.sql}${not
- .map(_ => " not")
- .getOrElse("")} ${if (rightParentheses) s"(${rightCriteria.sql})" else rightCriteria.sql}"
- }
-
- sealed trait ElasticOperator extends SQLOperator
- case object NESTED extends SQLExpr("nested") with ElasticOperator
- case object CHILD extends SQLExpr("child") with ElasticOperator
- case object PARENT extends SQLExpr("parent") with ElasticOperator
-
- sealed abstract class ElasticRelation(val criteria: SQLCriteria, val operator: ElasticOperator)
- extends SQLCriteria {
- override def sql = s"${operator.sql}(${criteria.sql})"
- def _retrieveType(criteria: SQLCriteria): Option[String] = criteria match {
- case SQLPredicate(left, _, _, _) => _retrieveType(left)
- case SQLBetween(col, _, _) => Some(col.identifier.split("\\.").head)
- case SQLExpression(col, _, _) => Some(col.identifier.split("\\.").head)
- case SQLIn(col, _, _) => Some(col.identifier.split("\\.").head)
- case SQLIsNull(col) => Some(col.identifier.split("\\.").head)
- case SQLIsNotNull(col) => Some(col.identifier.split("\\.").head)
- case relation: ElasticRelation => relation.`type`
- case _ => None
- }
- lazy val `type`: Option[String] = _retrieveType(criteria)
- }
-
- case class ElasticNested(override val criteria: SQLCriteria)
- extends ElasticRelation(criteria, NESTED)
-
- case class ElasticChild(override val criteria: SQLCriteria)
- extends ElasticRelation(criteria, CHILD)
-
- case class ElasticParent(override val criteria: SQLCriteria)
- extends ElasticRelation(criteria, PARENT)
-
- sealed trait SQLDelimiter extends SQLToken
- trait StartDelimiter extends SQLDelimiter
- trait EndDelimiter extends SQLDelimiter
- case object StartPredicate extends SQLExpr("(") with StartDelimiter
- case object EndPredicate extends SQLExpr(")") with EndDelimiter
- case object Separator extends SQLExpr(",") with EndDelimiter
-
- def choose[T](
- values: Seq[T],
- criteria: Option[SQLCriteria],
- function: Option[SQLFunction] = None
- )(implicit ev$1: T => Ordered[T]): Option[T] = {
- criteria match {
- case Some(SQLExpression(_, operator, value: SQLValue[T] @unchecked)) =>
- value.choose[T](values, Some(operator))
- case _ =>
- function match {
- case Some(_: SQLMin.type) => Some(values.min)
- case Some(_: SQLMax.type) => Some(values.max)
- // FIXME case Some(_: SQLSum.type) => Some(values.sum)
- // FIXME case Some(_: SQLAvg.type) => Some(values.sum / values.length )
- case _ => values.headOption
- }
- }
- }
-
- def toRegex(value: String): String = {
- val startWith = value.startsWith("%")
- val endWith = value.endsWith("%")
- val v =
- if (startWith && endWith)
- value.substring(1, value.length - 1)
- else if (startWith)
- value.substring(1)
- else if (endWith)
- value.substring(0, value.length - 1)
- else
- value
- s"""${if (startWith) ".*?"}$v${if (endWith) ".*?"}"""
- }
-
- case class SQLAlias(alias: String) extends SQLExpr(s" as $alias")
-
- sealed trait SQLFunction extends SQLToken
- case object SQLCount extends SQLExpr("count") with SQLFunction
- case object SQLMin extends SQLExpr("min") with SQLFunction
- case object SQLMax extends SQLExpr("max") with SQLFunction
- case object SQLAvg extends SQLExpr("avg") with SQLFunction
- case object SQLSum extends SQLExpr("sum") with SQLFunction
- case object SQLDistance extends SQLExpr("distance") with SQLFunction with SQLOperator
-
- case class SQLField(
- func: Option[SQLFunction] = None,
- identifier: SQLIdentifier,
- alias: Option[SQLAlias] = None
- ) extends SQLToken {
- override def sql: String =
- func match {
- case Some(f) => s"${f.sql}(${identifier.sql})${asString(alias)}"
- case _ => s"${identifier.sql}${asString(alias)}"
- }
- }
-
- class SQLCountField(
- override val identifier: SQLIdentifier,
- override val alias: Option[SQLAlias] = None,
- val filter: Option[SQLFilter] = None
- ) extends SQLField(Some(SQLCount), identifier, alias)
-
- case class SQLSelect(fields: Seq[SQLField] = Seq(SQLField(identifier = SQLIdentifier("*"))))
- extends SQLToken {
- override def sql: String = s"$SELECT ${fields.map(_.sql).mkString(",")}"
- }
-
- class SQLSelectCount(
- val countFields: Seq[SQLCountField] = Seq(new SQLCountField(identifier = SQLIdentifier("*")))
- ) extends SQLSelect(countFields)
-
- sealed trait SQLSource extends SQLToken
-
- case class SQLTable(source: SQLSource, alias: Option[SQLAlias] = None) extends SQLToken {
- override def sql: String = s"$source${asString(alias)}"
- }
-
- case class SQLFrom(tables: Seq[SQLTable]) extends SQLToken {
- override def sql: String = s" $FROM ${tables.map(_.sql).mkString(",")}"
- }
-
- case class SQLWhere(criteria: Option[SQLCriteria]) extends SQLToken {
- override def sql: String = criteria match {
- case Some(c) => s" $WHERE ${c.sql}"
- case _ => ""
- }
- }
-
- case class SQLFilter(criteria: Option[SQLCriteria]) extends SQLToken {
- override def sql: String = criteria match {
- case Some(c) => s" $FILTER($c)"
- case _ => ""
- }
- }
-
- case class SQLSelectQuery(
- select: SQLSelect = SQLSelect(),
- from: SQLFrom,
- where: Option[SQLWhere],
- limit: Option[SQLLimit] = None
- ) extends SQLToken {
- override def sql: String = s"${select.sql}${from.sql}${asString(where)}${asString(limit)}"
- }
-
- class SQLCountQuery(
- val selectCount: SQLSelectCount = new SQLSelectCount(),
- from: SQLFrom,
- where: Option[SQLWhere],
- limit: Option[SQLLimit] = None
- ) extends SQLSelectQuery(selectCount, from, where)
-
- case class SQLQuery(query: String)
-
- case class SQLQueries(queries: List[SQLQuery])
-}
diff --git a/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticFiltersSpec.scala b/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticFiltersSpec.scala
deleted file mode 100644
index 0283aed3..00000000
--- a/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticFiltersSpec.scala
+++ /dev/null
@@ -1,756 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import com.sksamuel.elastic4s.http.search.SearchBodyBuilderFn
-import com.sksamuel.elastic4s.searches.SearchRequest
-import com.sksamuel.elastic4s.searches.queries.Query
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-/** Created by smanciot on 13/04/17.
- */
-class ElasticFiltersSpec extends AnyFlatSpec with Matchers {
-
- import Queries._
-
- import scala.language.implicitConversions
-
- def query2String(result: Query): String = {
- SearchBodyBuilderFn(SearchRequest("*") query result).string()
- }
-
- "ElasticFilters" should "filter numerical eq" in {
- val result = ElasticFilters.filter(numericalEq)
- query2String(result) shouldBe """{
-
- |"query":{
- | "term" : {
- | "identifier" : {
- | "value" : "1.0"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter numerical ne" in {
- val result = ElasticFilters.filter(numericalNe)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool":{
- | "must_not":[
- | {
- | "term":{
- | "identifier":{
- | "value":"1"
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter numerical lt" in {
- val result = ElasticFilters.filter(numericalLt)
- query2String(result) shouldBe """{
-
- |"query":{
- | "range" : {
- | "identifier" : {
- | "lt" : "1"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter numerical le" in {
- val result = ElasticFilters.filter(numericalLe)
- query2String(result) shouldBe """{
-
- |"query":{
- | "range" : {
- | "identifier" : {
- | "lte" : "1"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter numerical gt" in {
- val result = ElasticFilters.filter(numericalGt)
- query2String(result) shouldBe """{
-
- |"query":{
- | "range" : {
- | "identifier" : {
- | "gt" : "1"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter numerical ge" in {
- val result = ElasticFilters.filter(numericalGe)
- query2String(result) shouldBe """{
-
- |"query":{
- | "range" : {
- | "identifier" : {
- | "gte" : "1"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter literal eq" in {
- val result = ElasticFilters.filter(literalEq)
- query2String(result) shouldBe """{
-
- |"query":{
- | "term" : {
- | "identifier" : {
- | "value" : "un"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter literal ne" in {
- val result = ElasticFilters.filter(literalNe)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "must_not" : [
- | {
- | "term" : {
- | "identifier" : {
- | "value" : "un"
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter literal like" in {
- val result = ElasticFilters.filter(literalLike)
- query2String(result) shouldBe """{
-
- |"query":{
- | "regexp" : {
- | "identifier" : {
- | "value" : ".*?un.*?"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter between" in {
- val result = ElasticFilters.filter(betweenExpression)
- query2String(result) shouldBe """{
-
- |"query":{
- | "range" : {
- | "identifier" : {
- | "gte" : "1",
- | "lte" : "2"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter and predicate" in {
- val result = ElasticFilters.filter(andPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "range" : {
- | "identifier2" : {
- | "gt" : "2"
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter or predicate" in {
- val result = ElasticFilters.filter(orPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "should" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "range" : {
- | "identifier2" : {
- | "gt" : "2"
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter left predicate with criteria" in {
- val result = ElasticFilters.filter(leftPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "should" : [
- | {
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "range" : {
- | "identifier2" : {
- | "gt" : "2"
- | }
- | }
- | }
- | ]
- | }
- | },
- | {
- | "term" : {
- | "identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter right predicate with criteria" in {
- val result = ElasticFilters.filter(rightPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "bool" : {
- | "should" : [
- | {
- | "range" : {
- | "identifier2" : {
- | "gt" : "2"
- | }
- | }
- | },
- | {
- | "term" : {
- | "identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | ]
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter multiple predicates" in {
- val result = ElasticFilters.filter(predicates)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "should" : [
- | {
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "range" : {
- | "identifier2" : {
- | "gt" : "2"
- | }
- | }
- | }
- | ]
- | }
- | },
- | {
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier3" : {
- | "value" : "3"
- | }
- | }
- | },
- | {
- | "term" : {
- | "identifier4" : {
- | "value" : "4"
- | }
- | }
- | }
- | ]
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter in literal expression" in {
- val result = ElasticFilters.filter(inLiteralExpression)
- query2String(result) shouldBe """{
-
- |"query":{
- | "terms" : {
- | "identifier" : [
- | "val1",
- | "val2",
- | "val3"
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter in numerical expression with Int values" in {
- val result = ElasticFilters.filter(inNumericalExpressionWithIntValues)
- query2String(result) shouldBe """{
-
- |"query":{
- | "terms" : {
- | "identifier" : [
- | 1,
- | 2,
- | 3
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter in numerical expression with Double values" in {
- val result = ElasticFilters.filter(inNumericalExpressionWithDoubleValues)
- query2String(result) shouldBe """{
-
- |"query":{
- | "terms" : {
- | "identifier" : [
- | 1.0,
- | 2.1,
- | 3.4
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter nested predicate" in {
- val result = ElasticFilters.filter(nestedPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "nested" : {
- | "path" : "nested",
- | "query" : {
- | "bool" : {
- | "should" : [
- | {
- | "range" : {
- | "nested.identifier2" : {
- | "gt" : "2"
- | }
- | }
- | },
- | {
- | "term" : {
- | "nested.identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | ]
- | }
- | },
- | "inner_hits":{"name":"nested"}
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter nested criteria" in {
- val result = ElasticFilters.filter(nestedCriteria)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "nested" : {
- | "path" : "nested",
- | "query" : {
- | "term" : {
- | "nested.identifier3" : {
- | "value" : "3"
- | }
- | }
- | },
- | "inner_hits":{"name":"nested"}
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter child predicate" in {
- val result = ElasticFilters.filter(childPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "has_child" : {
- | "type" : "child",
- | "score_mode" : "none",
- | "query" : {
- | "bool" : {
- | "should" : [
- | {
- | "range" : {
- | "child.identifier2" : {
- | "gt" : "2"
- | }
- | }
- | },
- | {
- | "term" : {
- | "child.identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | ]
- | }
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter child criteria" in {
- val result = ElasticFilters.filter(childCriteria)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "has_child" : {
- | "type" : "child",
- | "score_mode" : "none",
- | "query" : {
- | "term" : {
- | "child.identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter parent predicate" in {
- val result = ElasticFilters.filter(parentPredicate)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "has_parent" : {
- | "parent_type" : "parent",
- | "query" : {
- | "bool" : {
- | "should" : [
- | {
- | "range" : {
- | "parent.identifier2" : {
- | "gt" : "2"
- | }
- | }
- | },
- | {
- | "term" : {
- | "parent.identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | ]
- | }
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter parent criteria" in {
- val result = ElasticFilters.filter(parentCriteria)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "filter" : [
- | {
- | "term" : {
- | "identifier1" : {
- | "value" : "1"
- | }
- | }
- | },
- | {
- | "has_parent" : {
- | "parent_type" : "parent",
- | "query" : {
- | "term" : {
- | "parent.identifier3" : {
- | "value" : "3"
- | }
- | }
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter nested with between" in {
- val result = ElasticFilters.filter(nestedWithBetween)
- query2String(result) shouldBe """{
-
- |"query":{
- | "nested" : {
- | "path" : "ciblage",
- | "query" : {
- | "bool" : {
- | "filter" : [
- | {
- | "range" : {
- | "ciblage.Archivage_CreationDate" : {
- | "gte" : "now-3M/M",
- | "lte" : "now"
- | }
- | }
- | },
- | {
- | "term" : {
- | "ciblage.statutComportement" : {
- | "value" : "1"
- | }
- | }
- | }
- | ]
- | }
- | },
- | "inner_hits":{"name":"ciblage"}
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter boolean eq" in {
- val result = ElasticFilters.filter(boolEq)
- query2String(result) shouldBe """{
-
- |"query":{
- | "term" : {
- | "identifier" : {
- | "value" : true
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter boolean ne" in {
- val result = ElasticFilters.filter(boolNe)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "must_not" : [
- | {
- | "term" : {
- | "identifier" : {
- | "value" : false
- | }
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter is null" in {
- val result = ElasticFilters.filter(isNull)
- query2String(result) shouldBe """{
-
- |"query":{
- | "bool" : {
- | "must_not" : [
- | {
- | "exists" : {
- | "field" : "identifier"
- | }
- | }
- | ]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter is not null" in {
- val result = ElasticFilters.filter(isNotNull)
- query2String(result) shouldBe """{
-
- |"query":{
- | "exists" : {
- | "field" : "identifier"
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
- it should "filter geo distance criteria" in {
- val result = ElasticFilters.filter(geoDistanceCriteria)
- query2String(result) shouldBe
- """{
-
- |"query":{
- | "geo_distance" : {
- | "distance":"5km",
- | "profile.location":[40.0,-70.0]
- | }
- | }
- |}""".stripMargin.replaceAll("\\s", "")
- }
-
-}
diff --git a/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticQuerySpec.scala b/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticQuerySpec.scala
deleted file mode 100644
index 8ca199d1..00000000
--- a/elastic/src/test/scala/app/softnetwork/elastic/sql/ElasticQuerySpec.scala
+++ /dev/null
@@ -1,467 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-/** Created by smanciot on 13/04/17.
- */
-class ElasticQuerySpec extends AnyFlatSpec with Matchers {
-
- import scala.language.implicitConversions
-
- "ElasticQuery" should "perform native count" in {
- val results = ElasticQuery.count(
- SQLQuery("select count($t.id) as c2 from Table as t where $t.nom = \"Nom\"")
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe false
- result.distinct shouldBe false
- result.agg shouldBe "agg_id"
- result.field shouldBe "c2"
- result.sources shouldBe Seq[String]("Table")
- result.query shouldBe
- """|{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "term": {
- | "nom": {
- | "value": "Nom"
- | }
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "agg_id": {
- | "value_count": {
- | "field": "id"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "perform count distinct" in {
- val results = ElasticQuery.count(
- SQLQuery("select count(distinct $t.id) as c2 from Table as t where $t.nom = \"Nom\"")
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe false
- result.distinct shouldBe true
- result.agg shouldBe "agg_distinct_id"
- result.field shouldBe "c2"
- result.sources shouldBe Seq[String]("Table")
- result.query shouldBe
- """|{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "term": {
- | "nom": {
- | "value": "Nom"
- | }
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "agg_distinct_id": {
- | "cardinality": {
- | "field": "id"
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "perform nested count" in {
- val results = ElasticQuery.count(
- SQLQuery("select count(email.value) as email from index where nom = \"Nom\"")
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe true
- result.distinct shouldBe false
- result.agg shouldBe "nested_email.agg_email_value"
- result.field shouldBe "email"
- result.sources shouldBe Seq[String]("index")
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "term": {
- | "nom": {
- | "value": "Nom"
- | }
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "nested_email": {
- | "nested": {
- | "path": "email"
- | },
- | "aggs": {
- | "agg_email_value": {
- | "value_count": {
- | "field": "email.value"
- | }
- | }
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "perform nested count with nested criteria" in {
- val results = ElasticQuery.count(
- SQLQuery(
- "select count(email.value) as email from index where nom = \"Nom\" and (profile.postalCode in (\"75001\",\"75002\"))"
- )
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe true
- result.distinct shouldBe false
- result.agg shouldBe "nested_email.agg_email_value"
- result.field shouldBe "email"
- result.sources shouldBe Seq[String]("index")
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "term": {
- | "nom": {
- | "value": "Nom"
- | }
- | }
- | },
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "terms": {
- | "profile.postalCode": [
- | "75001",
- | "75002"
- | ]
- | }
- | },
- | "inner_hits":{"name":"profile"}
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "nested_email": {
- | "nested": {
- | "path": "email"
- | },
- | "aggs": {
- | "agg_email_value": {
- | "value_count": {
- | "field": "email.value"
- | }
- | }
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "perform nested count with filter" in {
- val results = ElasticQuery.count(
- SQLQuery(
- "select count(email.value) as email filter[email.context = \"profile\"] from index where nom = \"Nom\" and (profile.postalCode in (\"75001\",\"75002\"))"
- )
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe true
- result.distinct shouldBe false
- result.agg shouldBe "nested_email.filtered_agg.agg_email_value"
- result.field shouldBe "email"
- result.sources shouldBe Seq[String]("index")
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "term": {
- | "nom": {
- | "value": "Nom"
- | }
- | }
- | },
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "terms": {
- | "profile.postalCode": [
- | "75001",
- | "75002"
- | ]
- | }
- | },
- | "inner_hits":{"name":"profile"}
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "nested_email": {
- | "nested": {
- | "path": "email"
- | },
- | "aggs": {
- | "filtered_agg": {
- | "filter": {
- | "term": {
- | "email.context": {
- | "value": "profile"
- | }
- | }
- | },
- | "aggs": {
- | "agg_email_value": {
- | "value_count": {
- | "field": "email.value"
- | }
- | }
- | }
- | }
- | }
- | }
- | }
- |}""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "accept and not operator" in {
- val results = ElasticQuery.count(
- SQLQuery(
- "select count(distinct email.value) as email from index where (profile.postalCode = \"33600\" and not profile.postalCode = \"75001\")"
- )
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe true
- result.distinct shouldBe true
- result.agg shouldBe "nested_email.agg_distinct_email_value"
- result.field shouldBe "email"
- result.sources shouldBe Seq[String]("index")
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "must": [
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "term": {
- | "profile.postalCode": {
- | "value": "33600"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile"}
- | }
- | }
- | ],
- | "must_not": [
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "term": {
- | "profile.postalCode": {
- | "value": "75001"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile1"}
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "nested_email": {
- | "nested": {
- | "path": "email"
- | },
- | "aggs": {
- | "agg_distinct_email_value": {
- | "cardinality": {
- | "field": "email.value"
- | }
- | }
- | }
- | }
- | }
- |}
- |""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "accept date filtering" in {
- val results = ElasticQuery.count(
- SQLQuery(
- "select count(distinct email.value) as email from index where profile.postalCode = \"33600\" and profile.createdDate <= \"now-35M/M\""
- )
- )
- results.size shouldBe 1
- val result = results.head
- result.nested shouldBe true
- result.distinct shouldBe true
- result.agg shouldBe "nested_email.agg_distinct_email_value"
- result.field shouldBe "email"
- result.sources shouldBe Seq[String]("index")
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "term": {
- | "profile.postalCode": {
- | "value": "33600"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile"}
- | }
- | },
- | {
- | "nested": {
- | "path": "profile",
- | "query": {
- | "range": {
- | "profile.createdDate": {
- | "lte": "now-35M/M"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile1"}
- | }
- | }
- | ]
- | }
- | },
- | "size": 0,
- | "aggs": {
- | "nested_email": {
- | "nested": {
- | "path": "email"
- | },
- | "aggs": {
- | "agg_distinct_email_value": {
- | "cardinality": {
- | "field": "email.value"
- | }
- | }
- | }
- | }
- | }
- |}
- |""".stripMargin.replaceAll("\\s+", "")
- }
-
- it should "perform select" in {
- val select = ElasticQuery.select(
- SQLQuery("""
- |SELECT
- |profileId,
- |profile_ccm.email as email,
- |profile_ccm.city as city,
- |profile_ccm.firstName as firstName,
- |profile_ccm.lastName as lastName,
- |profile_ccm.postalCode as postalCode,
- |profile_ccm.birthYear as birthYear
- |FROM index
- |WHERE
- |profile_ccm.postalCode BETWEEN "10" AND "99999"
- |AND
- |profile_ccm.birthYear <= 2000
- |limit 100""".stripMargin)
- )
- select.isDefined shouldBe true
- val result = select.get
- result.query shouldBe
- """{
- | "query": {
- | "bool": {
- | "filter": [
- | {
- | "nested": {
- | "path": "profile_ccm",
- | "query": {
- | "range": {
- | "profile_ccm.postalCode": {
- | "gte": "10",
- | "lte": "99999"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile_ccm"}
- | }
- | },
- | {
- | "nested": {
- | "path": "profile_ccm",
- | "query": {
- | "range": {
- | "profile_ccm.birthYear": {
- | "lte": "2000"
- | }
- | }
- | },
- | "inner_hits":{"name":"profile_ccm1"}
- | }
- | }
- | ]
- | }
- | },
- | "from":0,
- | "size":100,
- | "_source": {
- | "includes": [
- | "profileId",
- | "profile_ccm.email",
- | "profile_ccm.city",
- | "profile_ccm.firstName",
- | "profile_ccm.lastName",
- | "profile_ccm.postalCode",
- | "profile_ccm.birthYear"
- | ]
- | }
- |}
- |""".stripMargin.replaceAll("\\s+", "")
- }
-
-}
diff --git a/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLLiteralSpec.scala b/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLLiteralSpec.scala
deleted file mode 100644
index 16b10632..00000000
--- a/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLLiteralSpec.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-/** Created by smanciot on 17/02/17.
- */
-class SQLLiteralSpec extends AnyFlatSpec with Matchers {
-
- "SQLLiteral" should "perform sql like" in {
- val l = SQLLiteral("%dummy%")
- l.like(Seq("dummy")) should ===(true)
- l.like(Seq("aa dummy")) should ===(true)
- l.like(Seq("dummy bbb")) should ===(true)
- l.like(Seq("aaa dummy bbb")) should ===(true)
- l.like(Seq("dummY")) should ===(false)
- }
-}
diff --git a/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLParserSpec.scala b/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLParserSpec.scala
deleted file mode 100644
index 49a0c267..00000000
--- a/elastic/src/test/scala/app/softnetwork/elastic/sql/SQLParserSpec.scala
+++ /dev/null
@@ -1,271 +0,0 @@
-package app.softnetwork.elastic.sql
-
-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-
-object Queries {
- val numericalEq = "select $t.col1,$t.col2 from Table as $t where $t.identifier = 1.0"
- val numericalLt = "select * from Table where identifier < 1"
- val numericalLe = "select * from Table where identifier <= 1"
- val numericalGt = "select * from Table where identifier > 1"
- val numericalGe = "select * from Table where identifier >= 1"
- val numericalNe = "select * from Table where identifier <> 1"
- val literalEq = """select * from Table where identifier = "un""""
- val literalLt = "select * from Table where createdAt < \"now-35M/M\""
- val literalLe = "select * from Table where createdAt <= \"now-35M/M\""
- val literalGt = "select * from Table where createdAt > \"now-35M/M\""
- val literalGe = "select * from Table where createdAt >= \"now-35M/M\""
- val literalNe = """select * from Table where identifier <> "un""""
- val boolEq = """select * from Table where identifier = true"""
- val boolNe = """select * from Table where identifier <> false"""
- val literalLike = """select * from Table where identifier like "%un%""""
- val betweenExpression = """select * from Table where identifier between "1" and "2""""
- val andPredicate = "select * from Table where (identifier1 = 1) and (identifier2 > 2)"
- val orPredicate = "select * from Table where (identifier1 = 1) or (identifier2 > 2)"
- val leftPredicate =
- "select * from Table where ((identifier1 = 1) and (identifier2 > 2)) or (identifier3 = 3)"
- val rightPredicate =
- "select * from Table where (identifier1 = 1) and ((identifier2 > 2) or (identifier3 = 3))"
- val predicates =
- "select * from Table where ((identifier1 = 1) and (identifier2 > 2)) or ((identifier3 = 3) and (identifier4 = 4))"
- val nestedPredicate =
- "select * from Table where (identifier1 = 1) and nested((nested.identifier2 > 2) or (nested.identifier3 = 3))"
- val nestedCriteria =
- "select * from Table where (identifier1 = 1) and nested(nested.identifier3 = 3)"
- val childPredicate =
- "select * from Table where (identifier1 = 1) and child((child.identifier2 > 2) or (child.identifier3 = 3))"
- val childCriteria = "select * from Table where (identifier1 = 1) and child(child.identifier3 = 3)"
- val parentPredicate =
- "select * from Table where (identifier1 = 1) and parent((parent.identifier2 > 2) or (parent.identifier3 = 3))"
- val parentCriteria =
- "select * from Table where (identifier1 = 1) and parent(parent.identifier3 = 3)"
- val inLiteralExpression = "select * from Table where identifier in (\"val1\",\"val2\",\"val3\")"
- val inNumericalExpressionWithIntValues = "select * from Table where identifier in (1,2,3)"
- val inNumericalExpressionWithDoubleValues =
- "select * from Table where identifier in (1.0,2.1,3.4)"
- val notInLiteralExpression =
- "select * from Table where identifier not in (\"val1\",\"val2\",\"val3\")"
- val notInNumericalExpressionWithIntValues = "select * from Table where identifier not in (1,2,3)"
- val notInNumericalExpressionWithDoubleValues =
- "select * from Table where identifier not in (1.0,2.1,3.4)"
- val nestedWithBetween =
- "select * from Table where nested((ciblage.Archivage_CreationDate between \"now-3M/M\" and \"now\") and (ciblage.statutComportement = 1))"
- val count = "select count($t.id) as c1 from Table as t where $t.nom = \"Nom\""
- val countDistinct = "select count(distinct $t.id) as c2 from Table as t where $t.nom = \"Nom\""
- val countNested =
- "select count(email.value) as email from crmgp where profile.postalCode in (\"75001\",\"75002\")"
- val isNull = "select * from Table where identifier is null"
- val isNotNull = "select * from Table where identifier is not null"
- val geoDistanceCriteria =
- "select * from Table where distance(profile.location,(-70.0,40.0)) <= \"5km\""
-}
-
-/** Created by smanciot on 15/02/17.
- */
-class SQLParserSpec extends AnyFlatSpec with Matchers {
-
- import Queries._
-
- "SQLParser" should "parse numerical eq" in {
- val result = SQLParser(numericalEq)
- result.right.get.sql should ===(numericalEq)
- }
-
- it should "parse numerical ne" in {
- val result = SQLParser(numericalNe)
- result.right.get.sql should ===(numericalNe)
- }
-
- it should "parse numerical lt" in {
- val result = SQLParser(numericalLt)
- result.right.get.sql should ===(numericalLt)
- }
-
- it should "parse numerical le" in {
- val result = SQLParser(numericalLe)
- result.right.get.sql should ===(numericalLe)
- }
-
- it should "parse numerical gt" in {
- val result = SQLParser(numericalGt)
- result.right.get.sql should ===(numericalGt)
- }
-
- it should "parse numerical ge" in {
- val result = SQLParser(numericalGe)
- result.right.get.sql should ===(numericalGe)
- }
-
- it should "parse literal eq" in {
- val result = SQLParser(literalEq)
- result.right.get.sql should ===(literalEq)
- }
-
- it should "parse literal like" in {
- val result = SQLParser(literalLike)
- result.right.get.sql should ===(literalLike)
- }
-
- it should "parse literal ne" in {
- val result = SQLParser(literalNe)
- result.right.get.sql should ===(literalNe)
- }
-
- it should "parse literal lt" in {
- val result = SQLParser(literalLt)
- result.right.get.sql should ===(literalLt)
- }
-
- it should "parse literal le" in {
- val result = SQLParser(literalLe)
- result.right.get.sql should ===(literalLe)
- }
-
- it should "parse literal gt" in {
- val result = SQLParser(literalGt)
- result.right.get.sql should ===(literalGt)
- }
-
- it should "parse literal ge" in {
- val result = SQLParser(literalGe)
- result.right.get.sql should ===(literalGe)
- }
-
- it should "parse boolean eq" in {
- val result = SQLParser(boolEq)
- result.right.get.sql should ===(boolEq)
- }
-
- it should "parse boolean ne" in {
- val result = SQLParser(boolNe)
- result.right.get.sql should ===(boolNe)
- }
-
- it should "parse between" in {
- val result = SQLParser(betweenExpression)
- result.right.get.sql should ===(betweenExpression)
- }
-
- it should "parse and predicate" in {
- val result = SQLParser(andPredicate)
- result.right.get.sql should ===(andPredicate)
- }
-
- it should "parse or predicate" in {
- val result = SQLParser(orPredicate)
- result.right.get.sql should ===(orPredicate)
- }
-
- it should "parse left predicate with criteria" in {
- val result = SQLParser(leftPredicate)
- result.right.get.sql should ===(leftPredicate)
- }
-
- it should "parse right predicate with criteria" in {
- val result = SQLParser(rightPredicate)
- result.right.get.sql should ===(rightPredicate)
- }
-
- it should "parse multiple predicates" in {
- val result = SQLParser(predicates)
- result.right.get.sql should ===(predicates)
- }
-
- it should "parse nested predicate" in {
- val result = SQLParser(nestedPredicate)
- result.right.get.sql should ===(nestedPredicate)
- }
-
- it should "parse nested criteria" in {
- val result = SQLParser(nestedCriteria)
- result.right.get.sql should ===(nestedCriteria)
- }
-
- it should "parse child predicate" in {
- val result = SQLParser(childPredicate)
- result.right.get.sql should ===(childPredicate)
- }
-
- it should "parse child criteria" in {
- val result = SQLParser(childCriteria)
- result.right.get.sql should ===(childCriteria)
- }
-
- it should "parse parent predicate" in {
- val result = SQLParser(parentPredicate)
- result.right.get.sql should ===(parentPredicate)
- }
-
- it should "parse parent criteria" in {
- val result = SQLParser(parentCriteria)
- result.right.get.sql should ===(parentCriteria)
- }
-
- it should "parse in literal expression" in {
- val result = SQLParser(inLiteralExpression)
- result.right.get.sql should ===(inLiteralExpression)
- }
-
- it should "parse in numerical expression with Int values" in {
- val result = SQLParser(inNumericalExpressionWithIntValues)
- result.right.get.sql should ===(inNumericalExpressionWithIntValues)
- }
-
- it should "parse in numerical expression with Double values" in {
- val result = SQLParser(inNumericalExpressionWithDoubleValues)
- result.right.get.sql should ===(inNumericalExpressionWithDoubleValues)
- }
-
- it should "parse not in literal expression" in {
- val result = SQLParser(notInLiteralExpression)
- result.right.get.sql should ===(notInLiteralExpression)
- }
-
- it should "parse not in numerical expression with Int values" in {
- val result = SQLParser(notInNumericalExpressionWithIntValues)
- result.right.get.sql should ===(notInNumericalExpressionWithIntValues)
- }
-
- it should "parse not in numerical expression with Double values" in {
- val result = SQLParser(notInNumericalExpressionWithDoubleValues)
- result.right.get.sql should ===(notInNumericalExpressionWithDoubleValues)
- }
-
- it should "parse nested with between" in {
- val result = SQLParser(nestedWithBetween)
- result.right.get.sql should ===(nestedWithBetween)
- }
-
- it should "parse count" in {
- val result = SQLParser(count)
- result.right.get.sql should ===(count)
- }
-
- it should "parse distinct count" in {
- val result = SQLParser(countDistinct)
- result.right.get.sql should ===(countDistinct)
- }
-
- it should "parse count with nested criteria" in {
- val result = SQLParser(countNested)
- result.right.get.sql should ===(
- "select count(email.value) as email from crmgp where nested(profile.postalCode in (\"75001\",\"75002\"))"
- )
- }
-
- it should "parse is null" in {
- val result = SQLParser(isNull)
- result.right.get.sql should ===(isNull)
- }
-
- it should "parse is not null" in {
- val result = SQLParser(isNotNull)
- result.right.get.sql should ===(isNotNull)
- }
-
- it should "parse geo distance criteria" in {
- val result = SQLParser(geoDistanceCriteria)
- result.right.get.sql should ===(geoDistanceCriteria)
- }
-
-}
diff --git a/elastic/testkit/build.sbt b/elastic/testkit/build.sbt
deleted file mode 100644
index 270a5958..00000000
--- a/elastic/testkit/build.sbt
+++ /dev/null
@@ -1,31 +0,0 @@
-Test / parallelExecution := false
-
-organization := "app.softnetwork.persistence"
-
-name := "persistence-elastic-testkit"
-
-val jacksonExclusions = Seq(
- ExclusionRule(organization = "com.fasterxml.jackson.core"),
- ExclusionRule(organization = "com.fasterxml.jackson.dataformat"),
- ExclusionRule(organization = "com.fasterxml.jackson.datatype"),
- ExclusionRule(organization = "com.fasterxml.jackson.module")
-)
-
-val elastic = Seq(
- "com.sksamuel.elastic4s" %% "elastic4s-core" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-http" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "org.elasticsearch" % "elasticsearch" % Versions.elasticSearch exclude ("org.apache.logging.log4j", "log4j-api"),
- "com.sksamuel.elastic4s" %% "elastic4s-testkit" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-embedded" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "com.sksamuel.elastic4s" %% "elastic4s-http" % Versions.elastic4s exclude ("org.elasticsearch", "elasticsearch"),
- "org.elasticsearch" % "elasticsearch" % Versions.elasticSearch exclude ("org.apache.logging.log4j", "log4j-api"),
- "org.apache.logging.log4j" % "log4j-api" % Versions.log4j,
- "org.apache.logging.log4j" % "log4j-slf4j-impl" % Versions.log4j,
- "org.apache.logging.log4j" % "log4j-core" % Versions.log4j,
- "pl.allegro.tech" % "embedded-elasticsearch" % "2.10.0" excludeAll(jacksonExclusions:_*),
- "org.testcontainers" % "elasticsearch" % Versions.testContainers excludeAll(jacksonExclusions:_*)
-)
-
-libraryDependencies ++= Seq(
- "org.apache.tika" % "tika-core" % "1.18"
-) ++ elastic
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/elastic/client/MockElasticClientApi.scala b/elastic/testkit/src/main/scala/app/softnetwork/elastic/client/MockElasticClientApi.scala
deleted file mode 100644
index 859bcc16..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/elastic/client/MockElasticClientApi.scala
+++ /dev/null
@@ -1,238 +0,0 @@
-package app.softnetwork.elastic.client
-
-import akka.NotUsed
-import akka.actor.ActorSystem
-import akka.stream.scaladsl.Flow
-import app.softnetwork.elastic.sql.{SQLQueries, SQLQuery}
-import app.softnetwork.persistence.message.CountResponse
-import org.json4s.Formats
-import app.softnetwork.persistence.model.Timestamped
-import org.slf4j.{Logger, LoggerFactory}
-
-import scala.collection.immutable.Seq
-import scala.concurrent.{ExecutionContext, Future}
-import scala.language.implicitConversions
-import scala.reflect.ClassTag
-
-/** Created by smanciot on 12/04/2020.
- */
-trait MockElasticClientApi extends ElasticClientApi {
-
- protected lazy val log: Logger = LoggerFactory getLogger getClass.getName
-
- protected val elasticDocuments: ElasticDocuments = new ElasticDocuments() {}
-
- override def toggleRefresh(index: String, enable: Boolean): Unit = {}
-
- override def setReplicas(index: String, replicas: Int): Unit = {}
-
- override def updateSettings(index: String, settings: String) = true
-
- override def addAlias(index: String, alias: String): Boolean = true
-
- override def createIndex(index: String, settings: String): Boolean = true
-
- override def setMapping(index: String, _type: String, mapping: String): Boolean = true
-
- override def deleteIndex(index: String): Boolean = true
-
- override def closeIndex(index: String): Boolean = true
-
- override def openIndex(index: String): Boolean = true
-
- override def countAsync(jsonQuery: JSONQuery)(implicit
- ec: ExecutionContext
- ): Future[Option[Double]] =
- throw new UnsupportedOperationException
-
- override def count(jsonQuery: JSONQuery): Option[Double] =
- throw new UnsupportedOperationException
-
- override def get[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], formats: Formats): Option[U] =
- elasticDocuments.get(id).asInstanceOf[Option[U]]
-
- override def getAsync[U <: Timestamped](
- id: String,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[Option[U]] =
- Future.successful(elasticDocuments.get(id).asInstanceOf[Option[U]])
-
- override def search[U](sqlQuery: SQLQuery)(implicit m: Manifest[U], formats: Formats): List[U] =
- elasticDocuments.getAll.toList.asInstanceOf[List[U]]
-
- override def searchAsync[U](
- sqlQuery: SQLQuery
- )(implicit m: Manifest[U], ec: ExecutionContext, formats: Formats): Future[List[U]] =
- Future.successful(search(sqlQuery))
-
- override def multiSearch[U](
- sqlQueries: SQLQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]] =
- throw new UnsupportedOperationException
-
- override def multiSearch[U](
- jsonQueries: JSONQueries
- )(implicit m: Manifest[U], formats: Formats): List[List[U]] =
- throw new UnsupportedOperationException
-
- override def index[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U], formats: Formats): Boolean = {
- elasticDocuments.createOrUpdate(entity)
- true
- }
-
- override def indexAsync[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None
- )(implicit u: ClassTag[U], ec: ExecutionContext, formats: Formats): Future[Boolean] = {
- elasticDocuments.createOrUpdate(entity)
- Future.successful(true)
- }
-
- override def index(index: String, _type: String, id: String, source: String): Boolean =
- throw new UnsupportedOperationException
-
- override def indexAsync(index: String, _type: String, id: String, source: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean] =
- throw new UnsupportedOperationException
-
- override def update[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None,
- upsert: Boolean = true
- )(implicit u: ClassTag[U], formats: Formats): Boolean = {
- elasticDocuments.createOrUpdate(entity)
- true
- }
-
- override def updateAsync[U <: Timestamped](
- entity: U,
- index: Option[String] = None,
- maybeType: Option[String] = None,
- upsert: Boolean = true
- )(implicit u: ClassTag[U], ec: ExecutionContext, formats: Formats): Future[Boolean] = {
- elasticDocuments.createOrUpdate(entity)
- Future.successful(true)
- }
-
- override def update(
- index: String,
- _type: String,
- id: String,
- source: String,
- upsert: Boolean
- ): Boolean = {
- log.warn(s"MockElasticClient - $id not updated for $source")
- false
- }
-
- override def updateAsync(
- index: String,
- _type: String,
- id: String,
- source: String,
- upsert: Boolean
- )(implicit ec: ExecutionContext): Future[Boolean] = Future.successful(false)
-
- override def delete(uuid: String, index: String, _type: String): Boolean = {
- if (elasticDocuments.get(uuid).isDefined) {
- elasticDocuments.delete(uuid)
- true
- } else {
- false
- }
- }
-
- override def deleteAsync(uuid: String, index: String, _type: String)(implicit
- ec: ExecutionContext
- ): Future[Boolean] = {
- Future.successful(delete(uuid, index, _type))
- }
-
- override def refresh(index: String): Boolean = true
-
- override def flush(index: String, force: Boolean, wait: Boolean): Boolean = true
-
- override type A = this.type
-
- override def bulk(implicit
- bulkOptions: BulkOptions,
- system: ActorSystem
- ): Flow[Seq[A], R, NotUsed] =
- throw new UnsupportedOperationException
-
- override def bulkResult: Flow[R, Set[String], NotUsed] =
- throw new UnsupportedOperationException
-
- override type R = this.type
-
- override def toBulkAction(bulkItem: BulkItem): A =
- throw new UnsupportedOperationException
-
- override implicit def toBulkElasticAction(a: A): BulkElasticAction =
- throw new UnsupportedOperationException
-
- override implicit def toBulkElasticResult(r: R): BulkElasticResult =
- throw new UnsupportedOperationException
-
- override def countAsync(sqlQuery: SQLQuery)(implicit
- ec: ExecutionContext
- ): Future[scala.Seq[CountResponse]] =
- throw new UnsupportedOperationException
-
- override def multiSearchWithInnerHits[U, I](jsonQueries: JSONQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]] = List.empty
-
- override def multiSearchWithInnerHits[U, I](sqlQueries: SQLQueries, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[List[(U, List[I])]] = List.empty
-
- override def search[U](jsonQuery: JSONQuery)(implicit m: Manifest[U], formats: Formats): List[U] =
- List.empty
-
- override def searchWithInnerHits[U, I](jsonQuery: JSONQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])] = List.empty
-
- override def searchWithInnerHits[U, I](sqlQuery: SQLQuery, innerField: String)(implicit
- m1: Manifest[U],
- m2: Manifest[I],
- formats: Formats
- ): List[(U, List[I])] = List.empty
-}
-
-trait ElasticDocuments {
-
- private[this] var documents: Map[String, Timestamped] = Map()
-
- def createOrUpdate(entity: Timestamped): Unit = {
- documents = documents.updated(entity.uuid, entity)
- }
-
- def delete(uuid: String): Unit = {
- documents = documents - uuid
- }
-
- def getAll: Iterable[Timestamped] = documents.values
-
- def get(uuid: String): Option[Timestamped] = documents.get(uuid)
-
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticDockerTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticDockerTestKit.scala
deleted file mode 100644
index 3e7f36da..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticDockerTestKit.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package app.softnetwork.elastic.scalatest
-
-import org.scalatest.Suite
-import org.testcontainers.elasticsearch.ElasticsearchContainer
-
-import scala.util.{Failure, Success}
-
-/** Created by smanciot on 28/06/2018.
- */
-trait ElasticDockerTestKit extends ElasticTestKit { _: Suite =>
-
- override lazy val elasticURL: String = s"http://${elasticContainer.getHttpHostAddress}"
-
- lazy val elasticContainer = new ElasticsearchContainer(
- s"docker.elastic.co/elasticsearch/elasticsearch:$elasticVersion"
- )
-
- override def start(): Unit = elasticContainer.start()
-
- override def stop(): Unit = elasticContainer.stop()
-
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticTestKit.scala
deleted file mode 100644
index 9fd80461..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/ElasticTestKit.scala
+++ /dev/null
@@ -1,313 +0,0 @@
-package app.softnetwork.elastic.scalatest
-
-import app.softnetwork.concurrent.scalatest.CompletionTestKit
-import com.sksamuel.elastic4s.{IndexAndTypes, Indexes}
-import com.sksamuel.elastic4s.http.index.admin.RefreshIndexResponse
-import com.sksamuel.elastic4s.http.{ElasticClient, ElasticDsl, ElasticProperties}
-import com.typesafe.config.{Config, ConfigFactory}
-import org.elasticsearch.ResourceAlreadyExistsException
-import org.elasticsearch.transport.RemoteTransportException
-import org.scalatest.{BeforeAndAfterAll, Suite}
-import org.scalatest.matchers.{MatchResult, Matcher}
-import org.slf4j.Logger
-
-import java.util.UUID
-import scala.util.{Failure, Success}
-
-/** Created by smanciot on 18/05/2021.
- */
-trait ElasticTestKit extends ElasticDsl with CompletionTestKit with BeforeAndAfterAll { _: Suite =>
-
- def log: Logger
-
- def elasticVersion: String = "6.7.2"
-
- def elasticURL: String
-
- lazy val elasticConfig: Config = ConfigFactory
- .parseString(elasticConfigAsString)
- .withFallback(ConfigFactory.load("softnetwork-elastic.conf"))
-
- lazy val elasticConfigAsString: String =
- s"""
- |elastic {
- | credentials {
- | url = "$elasticURL"
- | }
- | multithreaded = false
- | discovery-enabled = false
- |}
- |""".stripMargin
-
- lazy val clusterName: String = s"test-${UUID.randomUUID()}"
-
- lazy val client: ElasticClient = ElasticClient(ElasticProperties(elasticURL))
-
- def start(): Unit = ()
-
- def stop(): Unit = ()
-
- override def beforeAll(): Unit = {
- start()
- client.execute {
- createIndexTemplate("all_templates", "*").settings(
- Map("number_of_shards" -> 1, "number_of_replicas" -> 0)
- )
- } complete () match {
- case Success(_) => ()
- case Failure(f) => throw f
- }
- }
-
- override def afterAll(): Unit = {
- client.close()
- stop()
- }
-
- // Rewriting methods from IndexMatchers in elastic4s with the ElasticClient
- def haveCount(expectedCount: Int): Matcher[String] =
- (left: String) => {
- client.execute(search(left).size(0)) complete () match {
- case Success(s) =>
- val count = s.result.totalHits
- MatchResult(
- count == expectedCount,
- s"Index $left had count $count but expected $expectedCount",
- s"Index $left had document count $expectedCount"
- )
- case Failure(f) => throw f
- }
- }
-
- def containDoc(expectedId: String): Matcher[String] =
- (left: String) => {
- client.execute(get(expectedId).from(left)) complete () match {
- case Success(s) =>
- val exists = s.result.exists
- MatchResult(
- exists,
- s"Index $left did not contain expected document $expectedId",
- s"Index $left contained document $expectedId"
- )
- case Failure(f) => throw f
- }
- }
-
- def beCreated(): Matcher[String] =
- (left: String) => {
- client.execute(indexExists(left)) complete () match {
- case Success(s) =>
- val exists = s.result.isExists
- MatchResult(
- exists,
- s"Index $left did not exist",
- s"Index $left exists"
- )
- case Failure(f) => throw f
- }
- }
-
- def beEmpty(): Matcher[String] =
- (left: String) => {
- client.execute(search(left).size(0)) complete () match {
- case Success(s) =>
- val count = s.result.totalHits
- MatchResult(
- count == 0,
- s"Index $left was not empty",
- s"Index $left was empty"
- )
- case Failure(f) => throw f
- }
- }
-
- // Copy/paste methos HttpElasticSugar as it is not available yet
-
- // refresh all indexes
- def refreshAll(): RefreshIndexResponse = refresh(Indexes.All)
-
- // refreshes all specified indexes
- def refresh(indexes: Indexes): RefreshIndexResponse = {
- client
- .execute {
- refreshIndex(indexes)
- } complete () match {
- case Success(s) => s.result
- case Failure(f) => throw f
- }
- }
-
- def blockUntilGreen(): Unit = {
- blockUntil("Expected cluster to have green status") { () =>
- client
- .execute {
- clusterHealth()
- } complete () match {
- case Success(s) => s.result.status.toUpperCase == "GREEN"
- case Failure(f) => throw f
- }
- }
- }
-
- def blockUntil(explain: String)(predicate: () => Boolean): Unit = {
- blockUntil(explain, 16, 200)(predicate)
- }
-
- def ensureIndexExists(index: String): Unit = {
- client.execute {
- createIndex(index)
- } complete () match {
- case Success(_) => ()
- case Failure(f) =>
- f match {
- case _: ResourceAlreadyExistsException => // Ok, ignore.
- case _: RemoteTransportException => // Ok, ignore.
- case other => throw other
- }
- }
- }
-
- def doesIndexExists(name: String): Boolean = {
- client
- .execute {
- indexExists(name)
- } complete () match {
- case Success(s) => s.result.isExists
- case _ => false
- }
- }
-
- def doesAliasExists(name: String): Boolean = {
- client
- .execute {
- aliasExists(name)
- } complete () match {
- case Success(s) => s.result.isExists
- case _ => false
- }
- }
-
- def deleteIndex(name: String): Unit = {
- if (doesIndexExists(name)) {
- client.execute {
- ElasticDsl.deleteIndex(name)
- } complete () match {
- case Success(_) => ()
- case Failure(f) => throw f
- }
- }
- }
-
- def truncateIndex(index: String): Unit = {
- deleteIndex(index)
- ensureIndexExists(index)
- blockUntilEmpty(index)
- }
-
- def blockUntilDocumentExists(id: String, index: String, _type: String): Unit = {
- blockUntil(s"Expected to find document $id") { () =>
- client
- .execute {
- get(id).from(index / _type)
- } complete () match {
- case Success(s) => s.result.exists
- case _ => false
- }
- }
- }
-
- def blockUntilCount(expected: Long, index: String): Unit = {
- blockUntil(s"Expected count of $expected") { () =>
- client.execute {
- search(index).matchAllQuery().size(0)
- } complete () match {
- case Success(s) => expected <= s.result.totalHits
- case Failure(f) => throw f
- }
- }
- }
-
- def blockUntilCount(expected: Long, indexAndTypes: IndexAndTypes): Unit = {
- blockUntil(s"Expected count of $expected") { () =>
- client.execute {
- searchWithType(indexAndTypes).matchAllQuery().size(0)
- } complete () match {
- case Success(s) => expected <= s.result.totalHits
- case Failure(f) => throw f
- }
- }
- }
-
- /** Will block until the given index and optional types have at least the given number of
- * documents.
- */
- def blockUntilCount(expected: Long, index: String, types: String*): Unit = {
- blockUntil(s"Expected count of $expected") { () =>
- client.execute {
- searchWithType(index / types).matchAllQuery().size(0)
- } complete () match {
- case Success(s) => expected <= s.result.totalHits
- case Failure(f) => throw f
- }
- }
- }
-
- def blockUntilExactCount(expected: Long, index: String, types: String*): Unit = {
- blockUntil(s"Expected count of $expected") { () =>
- client
- .execute {
- searchWithType(index / types).size(0)
- } complete () match {
- case Success(s) => expected == s.result.totalHits
- case Failure(f) => throw f
- }
- }
- }
-
- def blockUntilEmpty(index: String): Unit = {
- blockUntil(s"Expected empty index $index") { () =>
- client
- .execute {
- search(Indexes(index)).size(0)
- } complete () match {
- case Success(s) => s.result.totalHits == 0
- case Failure(f) => throw f
- }
- }
- }
-
- def blockUntilIndexExists(index: String): Unit = {
- blockUntil(s"Expected exists index $index") { () ⇒
- doesIndexExists(index)
- }
- }
-
- def blockUntilIndexNotExists(index: String): Unit = {
- blockUntil(s"Expected not exists index $index") { () ⇒
- !doesIndexExists(index)
- }
- }
-
- def blockUntilAliasExists(alias: String): Unit = {
- blockUntil(s"Expected exists alias $alias") { () ⇒
- doesAliasExists(alias)
- }
- }
-
- def blockUntilDocumentHasVersion(
- index: String,
- _type: String,
- id: String,
- version: Long
- ): Unit = {
- blockUntil(s"Expected document $id to have version $version") { () =>
- client
- .execute {
- get(id).from(index / _type)
- } complete () match {
- case Success(s) => s.result.version == version
- case Failure(f) => throw f
- }
- }
- }
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/EmbeddedElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/EmbeddedElasticTestKit.scala
deleted file mode 100644
index d0343344..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/elastic/scalatest/EmbeddedElasticTestKit.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package app.softnetwork.elastic.scalatest
-
-import org.scalatest.Suite
-import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic
-import pl.allegro.tech.embeddedelasticsearch.PopularProperties._
-
-import java.net.ServerSocket
-import java.util.concurrent.TimeUnit
-
-trait EmbeddedElasticTestKit extends ElasticTestKit { _: Suite =>
-
- override lazy val elasticURL: String = s"http://127.0.0.1:${embeddedElastic.getHttpPort}"
-
- override def stop(): Unit = embeddedElastic.stop()
-
- private[this] def dynamicPort: Int = {
- val socket = new ServerSocket(0)
- val port = socket.getLocalPort
- socket.close()
- port
- }
-
- private[this] val embeddedElastic: EmbeddedElastic = EmbeddedElastic
- .builder()
- .withElasticVersion(elasticVersion)
- .withSetting(HTTP_PORT, dynamicPort)
- .withSetting(CLUSTER_NAME, clusterName)
- .withCleanInstallationDirectoryOnStop(true)
- .withEsJavaOpts("-Xms128m -Xmx512m")
- .withStartTimeout(2, TimeUnit.MINUTES)
- .build()
- .start()
-
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/JdbcPersonToElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/JdbcPersonToElasticTestKit.scala
deleted file mode 100644
index 1c395ea4..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/JdbcPersonToElasticTestKit.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package app.softnetwork.persistence.person
-
-import akka.actor.typed.ActorSystem
-import app.softnetwork.persistence.jdbc.query.{JdbcJournalProvider, JdbcOffsetProvider}
-import app.softnetwork.persistence.person.query.{
- PersonToElasticProcessorStream,
- PersonToExternalProcessorStream
-}
-import com.typesafe.config.Config
-import slick.jdbc.JdbcProfile
-
-trait JdbcPersonToElasticTestKit
- extends PersonToElasticTestKit
- with JdbcPersonTestKit
- with JdbcProfile {
-
- override def person2ExternalProcessorStream: ActorSystem[_] => PersonToExternalProcessorStream =
- sys => {
- new PersonToElasticProcessorStream with JdbcJournalProvider with JdbcOffsetProvider {
- override val forTests: Boolean = true
- override implicit def system: ActorSystem[_] = sys
-
- override def config: Config = JdbcPersonToElasticTestKit.this.config.withFallback(
- JdbcPersonToElasticTestKit.this.elasticConfig
- )
- }
- }
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/MySQLPersonToElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/MySQLPersonToElasticTestKit.scala
deleted file mode 100644
index d5d8b1a2..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/MySQLPersonToElasticTestKit.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package app.softnetwork.persistence.person
-
-import app.softnetwork.persistence.jdbc.scalatest.MySQLTestKit
-
-trait MySQLPersonToElasticTestKit extends JdbcPersonToElasticTestKit with MySQLTestKit
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PersonToElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PersonToElasticTestKit.scala
deleted file mode 100644
index fa4d77e6..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PersonToElasticTestKit.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package app.softnetwork.persistence.person
-
-import app.softnetwork.elastic.client.jest.JestClientApi
-import app.softnetwork.elastic.persistence.query.ElasticProvider
-import app.softnetwork.elastic.scalatest.ElasticTestKit
-import app.softnetwork.persistence.ManifestWrapper
-import app.softnetwork.persistence.person.model.Person
-import app.softnetwork.persistence.query.ExternalPersistenceProvider
-import app.softnetwork.persistence.schema.Schema
-import com.typesafe.config.Config
-
-trait PersonToElasticTestKit extends PersonTestKit with ElasticTestKit { _: Schema =>
-
- override lazy val externalPersistenceProvider: ExternalPersistenceProvider[Person] =
- new ElasticProvider[Person] with JestClientApi with ManifestWrapper[Person] {
- override def config: Config = PersonToElasticTestKit.this.elasticConfig
- override protected val manifestWrapper: ManifestW = ManifestW()
- }
-
- override def start(): Unit = {
- super.start()
- initAndJoinCluster()
- }
-
- override def stop(): Unit = {
- shutdownCluster()
- super.stop()
- }
-
-}
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PostgresPersonToElasticTestKit.scala b/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PostgresPersonToElasticTestKit.scala
deleted file mode 100644
index 65074f02..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/PostgresPersonToElasticTestKit.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package app.softnetwork.persistence.person
-
-import app.softnetwork.persistence.jdbc.scalatest.PostgresTestKit
-
-trait PostgresPersonToElasticTestKit extends JdbcPersonToElasticTestKit with PostgresTestKit
diff --git a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToElasticProcessorStream.scala b/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToElasticProcessorStream.scala
deleted file mode 100644
index 7a8e013a..00000000
--- a/elastic/testkit/src/main/scala/app/softnetwork/persistence/person/query/PersonToElasticProcessorStream.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package app.softnetwork.persistence.person.query
-
-import app.softnetwork.elastic.client.jest.JestClientApi
-import app.softnetwork.elastic.persistence.query.ElasticProvider
-import app.softnetwork.persistence.ManifestWrapper
-import app.softnetwork.persistence.person.model.Person
-import app.softnetwork.persistence.query.{JournalProvider, OffsetProvider}
-
-trait PersonToElasticProcessorStream
- extends PersonToExternalProcessorStream
- with ElasticProvider[Person]
- with JestClientApi
- with ManifestWrapper[Person] { _: JournalProvider with OffsetProvider =>
- override protected val manifestWrapper: ManifestW = ManifestW()
-}
diff --git a/elastic/testkit/src/test/resources/application.conf b/elastic/testkit/src/test/resources/application.conf
deleted file mode 100644
index ba8abfad..00000000
--- a/elastic/testkit/src/test/resources/application.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-akka.coordinated-shutdown.exit-jvm = off
-elastic.multithreaded = false
-clustering.port = 0
diff --git a/elastic/testkit/src/test/resources/avatar.jpg b/elastic/testkit/src/test/resources/avatar.jpg
deleted file mode 100644
index 7a214ba8..00000000
Binary files a/elastic/testkit/src/test/resources/avatar.jpg and /dev/null differ
diff --git a/elastic/testkit/src/test/resources/avatar.pdf b/elastic/testkit/src/test/resources/avatar.pdf
deleted file mode 100644
index cf44452f..00000000
Binary files a/elastic/testkit/src/test/resources/avatar.pdf and /dev/null differ
diff --git a/elastic/testkit/src/test/resources/avatar.png b/elastic/testkit/src/test/resources/avatar.png
deleted file mode 100644
index a11b4dcd..00000000
Binary files a/elastic/testkit/src/test/resources/avatar.png and /dev/null differ
diff --git a/elastic/testkit/src/test/resources/mapping/sample.mustache b/elastic/testkit/src/test/resources/mapping/sample.mustache
deleted file mode 100644
index f3e19d45..00000000
--- a/elastic/testkit/src/test/resources/mapping/sample.mustache
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "{{type}}": {
- "properties": {
- "uuid": {
- "type": "keyword",
- "index": true
- },
- "createdDate": {
- "type": "date"
- },
- "lastUpdated": {
- "type": "date"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/ElasticClientSpec.scala b/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/ElasticClientSpec.scala
deleted file mode 100644
index abfb92bb..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/ElasticClientSpec.scala
+++ /dev/null
@@ -1,468 +0,0 @@
-package app.softnetwork.elastic.client
-
-import java.io.ByteArrayInputStream
-import java.util.concurrent.TimeUnit
-import java.util.UUID
-import akka.actor.ActorSystem
-import app.softnetwork.elastic.client.JestProviders._
-import app.softnetwork.elastic.sql.SQLQuery
-import com.fasterxml.jackson.core.JsonParseException
-import com.sksamuel.elastic4s.searches.queries.matches.MatchAllQuery
-import io.searchbox.client.JestClient
-import io.searchbox.indices.CreateIndex
-import io.searchbox.indices.aliases.AliasExists
-import io.searchbox.indices.mapping.PutMapping
-import io.searchbox.indices.settings.GetSettings
-import org.scalatest.flatspec.AnyFlatSpecLike
-import org.scalatest.matchers.should.Matchers
-import app.softnetwork.persistence._
-import app.softnetwork.serialization._
-import app.softnetwork.elastic.model._
-import app.softnetwork.elastic.scalatest.EmbeddedElasticTestKit
-import app.softnetwork.persistence.person.model.Person
-import org.json4s.Formats
-import org.slf4j.{Logger, LoggerFactory}
-
-import java.nio.file.{Files, Paths}
-import scala.concurrent.{Await, ExecutionContextExecutor}
-import scala.concurrent.duration.Duration
-import scala.util.{Failure, Success}
-
-/** Created by smanciot on 28/06/2018.
- */
-class ElasticClientSpec extends AnyFlatSpecLike with EmbeddedElasticTestKit with Matchers {
-
- lazy val log: Logger = LoggerFactory getLogger getClass.getName
-
- implicit val system: ActorSystem = ActorSystem(generateUUID())
-
- implicit val executionContext: ExecutionContextExecutor = system.dispatcher
-
- implicit val formats: Formats = commonFormats
-
- lazy val pClient = new PersonProvider(elasticConfig)
- lazy val sClient = new SampleProvider(elasticConfig)
- lazy val bClient = new BinaryProvider(elasticConfig)
-
- import scala.language.implicitConversions
-
- implicit def toSQLQuery(sqlQuery: String): SQLQuery = SQLQuery(sqlQuery)
-
- override def beforeAll(): Unit = {
- super.beforeAll()
- pClient.createIndex("person")
- }
-
- override def afterAll(): Unit = {
- Await.result(system.terminate(), Duration(30, TimeUnit.SECONDS))
- super.afterAll()
- }
-
- "Creating an index and then delete it" should "work fine" in {
- pClient.createIndex("create_delete")
- blockUntilIndexExists("create_delete")
- "create_delete" should beCreated
-
- pClient.deleteIndex("create_delete")
- blockUntilIndexNotExists("create_delete")
- "create_delete" should not(beCreated())
- }
-
- "Adding an alias" should "work" in {
- pClient.addAlias("person", "person_alias")
-
- val aliasExists = new AliasExists.Builder().build()
- pClient.jestClient.execute(aliasExists).isSucceeded shouldBe true
- }
-
- private def settings =
- pClient.jestClient
- .execute(new GetSettings.Builder().addIndex("person").build())
- .getJsonObject
- .getAsJsonObject("person")
- .getAsJsonObject("settings")
-
- "Toggle refresh" should "work" in {
- pClient.toggleRefresh("person", enable = false)
-
- settings.getAsJsonObject("index").get("refresh_interval").getAsString shouldBe "-1"
-
- pClient.toggleRefresh("person", enable = true)
- settings.getAsJsonObject("index").get("refresh_interval").getAsString shouldBe "1s"
- }
-
- "Updating number of replicas" should "work" in {
- pClient.setReplicas("person", 3)
- settings.getAsJsonObject("index").get("number_of_replicas").getAsString shouldBe "3"
-
- pClient.setReplicas("person", 0)
- settings.getAsJsonObject("index").get("number_of_replicas").getAsString shouldBe "0"
- }
-
- val persons = List(
- """ { "uuid": "A12", "name": "Homer Simpson", "birthDate": "1967-11-21 12:00:00"} """,
- """ { "uuid": "A14", "name": "Moe Szyslak", "birthDate": "1967-11-21 12:00:00"} """,
- """ { "uuid": "A16", "name": "Barney Gumble", "birthDate": "1969-05-09 21:00:00"} """
- )
-
- private val personsWithUpsert =
- persons :+ """ { "uuid": "A16", "name": "Barney Gumble2", "birthDate": "1969-05-09 21:00:00"} """
-
- val children = List(
- """ { "parentId": "A16", "name": "Steve Gumble", "birthDate": "1999-05-09 21:00:00"} """,
- """ { "parentId": "A16", "name": "Josh Gumble", "birthDate": "1999-05-09 21:00:00"} """
- )
-
- "Bulk index valid json without id key and suffix key" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person1", "person", 2)
- implicit val jclient: JestClient = pClient.jestClient
- val indices = pClient.bulk[String](persons.iterator, identity, None, None, None)
-
- indices should contain only "person1"
-
- blockUntilCount(3, "person1")
-
- "person1" should haveCount(3)
-
- val response = client.execute {
- search("person1").query(MatchAllQuery())
- } complete ()
-
- response.result.hits.hits.foreach { h =>
- h.id should not be h.sourceField("uuid")
- }
-
- response.result.hits.hits
- .map(
- _.sourceField("name")
- ) should contain allOf ("Homer Simpson", "Moe Szyslak", "Barney Gumble")
- }
-
- "Bulk index valid json with an id key but no suffix key" should "work" in {
- pClient.jestClient.execute(new CreateIndex.Builder("person2").build())
- val childMapping = new PutMapping.Builder(
- "person2",
- "child",
- "{ \"child\" : { \"_parent\" : {\"type\": \"person\"}, \"properties\" : { \"name\" : {\"type\" : \"string\", \"index\" : \"not_analyzed\"} } } }"
- ).build()
- pClient.jestClient.execute(childMapping)
-
- implicit val bulkOptions: BulkOptions = BulkOptions("person2", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices = pClient.bulk[String](persons.iterator, identity, Some("uuid"), None, None)
- refresh(indices)
-
- indices should contain only "person2"
-
- blockUntilCount(3, "person2")
-
- "person2" should haveCount(3)
-
- val response = client.execute {
- search("person2").query(MatchAllQuery())
- } complete ()
-
- response.result.hits.hits.foreach { h =>
- h.id shouldBe h.sourceField("uuid")
- }
-
- response.result.hits.hits
- .map(
- _.sourceField("name")
- ) should contain allOf ("Homer Simpson", "Moe Szyslak", "Barney Gumble")
-
- // FIXME elastic >= v 6.x no more multiple Parent / Child relationship allowed within the same index
-// val childIndices =
-// pClient.bulk[String](children.iterator, identity, None, None, None, None, None, Some("parentId"))(
-// jclient,
-// BulkOptions("person2", "child", 1000),
-// system)
-// pClient.refresh("person2")
-//
-// childIndices should contain only "person2"
-//
-// blockUntilCount(2, "person2", "child")
-//
-// "person2" should haveCount(5)
- }
-
- "Bulk index valid json with an id key and a suffix key" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient.bulk[String](persons.iterator, identity, Some("uuid"), Some("birthDate"), None, None)
- refresh(indices)
-
- indices should contain allOf ("person-1967-11-21", "person-1969-05-09")
-
- blockUntilCount(2, "person-1967-11-21")
- blockUntilCount(1, "person-1969-05-09")
-
- "person-1967-11-21" should haveCount(2)
- "person-1969-05-09" should haveCount(1)
-
- val response = client.execute {
- search("person-1967-11-21", "person-1969-05-09").query(MatchAllQuery())
- } complete ()
-
- response.result.hits.hits.foreach { h =>
- h.id shouldBe h.sourceField("uuid")
- }
-
- response.result.hits.hits
- .map(
- _.sourceField("name")
- ) should contain allOf ("Homer Simpson", "Moe Szyslak", "Barney Gumble")
- }
-
- "Bulk index invalid json with an id key and a suffix key" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person_error", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- intercept[JsonParseException] {
- val invalidJson = persons :+ "fail"
- pClient.bulk[String](invalidJson.iterator, identity, None, None, None)
- }
- }
-
- "Bulk upsert valid json with an id key but no suffix key" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person4", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient
- .bulk[String](personsWithUpsert.iterator, identity, Some("uuid"), None, None, Some(true))
- refresh(indices)
-
- indices should contain only "person4"
-
- blockUntilCount(3, "person4")
-
- "person4" should haveCount(3)
-
- val response = client.execute {
- search("person4").query(MatchAllQuery())
- } complete ()
-
- response.result.hits.hits.foreach { h =>
- h.id shouldBe h.sourceField("uuid")
- }
-
- response.result.hits.hits
- .map(
- _.sourceField("name")
- ) should contain allOf ("Homer Simpson", "Moe Szyslak", "Barney Gumble2")
- }
-
- "Bulk upsert valid json with an id key and a suffix key" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person5", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices = pClient.bulk[String](
- personsWithUpsert.iterator,
- identity,
- Some("uuid"),
- Some("birthDate"),
- None,
- Some(true)
- )
- refresh(indices)
-
- indices should contain allOf ("person5-1967-11-21", "person5-1969-05-09")
-
- blockUntilCount(2, "person5-1967-11-21")
- blockUntilCount(1, "person5-1969-05-09")
-
- "person5-1967-11-21" should haveCount(2)
- "person5-1969-05-09" should haveCount(1)
-
- val response = client.execute {
- search("person5-1967-11-21", "person5-1969-05-09").query(MatchAllQuery())
- } complete ()
-
- response.result.hits.hits.foreach { h =>
- h.id shouldBe h.sourceField("uuid")
- }
-
- response.result.hits.hits
- .map(
- _.sourceField("name")
- ) should contain allOf ("Homer Simpson", "Moe Szyslak", "Barney Gumble2")
- }
-
- "Count" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person6", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient
- .bulk[String](personsWithUpsert.iterator, identity, Some("uuid"), None, None, Some(true))
- refresh(indices)
-
- indices should contain only "person6"
-
- blockUntilCount(3, "person6")
-
- "person6" should haveCount(3)
-
- import scala.collection.immutable.Seq
-
- pClient.countAsync(JSONQuery("{}", Seq[String]("person6"), Seq[String]())) complete () match {
- case Success(s) => s.getOrElse(0d).toInt should ===(3)
- case Failure(f) => fail(f.getMessage)
- }
- }
-
- "Search" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person7", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient
- .bulk[String](personsWithUpsert.iterator, identity, Some("uuid"), None, None, Some(true))
- refresh(indices)
-
- indices should contain only "person7"
-
- blockUntilCount(3, "person7")
-
- "person7" should haveCount(3)
-
- pClient.searchAsync[Person](SQLQuery("select * from person7")) assert {
- _.size should ===(3)
- }
-
- pClient.searchAsync[Person](SQLQuery("select * from person7 where _id=\"A16\"")) assert {
- _.size should ===(1)
- }
-
- }
-
- "Get all" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person8", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient
- .bulk[String](personsWithUpsert.iterator, identity, Some("uuid"), None, None, Some(true))
- refresh(indices)
-
- indices should contain only "person8"
-
- blockUntilCount(3, "person8")
-
- "person8" should haveCount(3)
-
- val response = pClient.search[Person]("select * from person8")
-
- response.size should ===(3)
-
- }
-
- "Get" should "work" in {
- implicit val bulkOptions: BulkOptions = BulkOptions("person9", "person", 1000)
- implicit val jclient: JestClient = pClient.jestClient
- val indices =
- pClient
- .bulk[String](personsWithUpsert.iterator, identity, Some("uuid"), None, None, Some(true))
- refresh(indices)
-
- indices should contain only "person9"
-
- blockUntilCount(3, "person9")
-
- "person9" should haveCount(3)
-
- val response = pClient.get[Person]("A16", Some("person9"))
-
- response.isDefined shouldBe true
- response.get.uuid shouldBe "A16"
-
- }
-
- "Index" should "work" in {
- implicit val jclient: JestClient = sClient.jestClient
- val uuid = UUID.randomUUID().toString
- val sample = Sample(uuid)
- val result = sClient.index(sample)
- result shouldBe true
-
- val result2 = sClient.get[Sample](uuid)
- result2.isDefined shouldBe true
- result2.get.uuid shouldBe uuid
- }
-
- "Update" should "work" in {
- implicit val jclient: JestClient = sClient.jestClient
- val uuid = UUID.randomUUID().toString
- val sample = Sample(uuid)
- val result = sClient.update(sample)
- result shouldBe true
-
- val result2 = sClient.get[Sample](uuid)
- result2.isDefined shouldBe true
- result2.get.uuid shouldBe uuid
- }
-
- "Delete" should "work" in {
- implicit val jclient: JestClient = sClient.jestClient
- val uuid = UUID.randomUUID().toString
- val sample = Sample(uuid)
- val result = sClient.index(sample)
- result shouldBe true
-
- val result2 = sClient.delete(sample.uuid, Some("sample"), Some("sample"))
- result2 shouldBe true
-
- val result3 = sClient.get(uuid)
- result3.isEmpty shouldBe true
- }
-
- "Index binary data" should "work" in {
- implicit val jclient: JestClient = bClient.jestClient
- bClient.createIndex("binaries") shouldBe true
- val mapping =
- """{
- | "test": {
- | "properties": {
- | "uuid": {
- | "type": "keyword",
- | "index": true
- | },
- | "createdDate": {
- | "type": "date"
- | },
- | "lastUpdated": {
- | "type": "date"
- | },
- | "content": {
- | "type": "binary"
- | },
- | "md5": {
- | "type": "keyword"
- | }
- | }
- | }
- |}
- """.stripMargin
- bClient.setMapping("binaries", "test", mapping) shouldBe true
- for (uuid <- Seq("png", "jpg", "pdf")) {
- val path =
- Paths.get(Thread.currentThread().getContextClassLoader.getResource(s"avatar.$uuid").getPath)
- import app.softnetwork.utils.ImageTools._
- import app.softnetwork.utils.HashTools._
- import app.softnetwork.utils.Base64Tools._
- val encoded = encodeImageBase64(path).getOrElse("")
- val binary = Binary(
- uuid,
- content = encoded,
- md5 = hashStream(new ByteArrayInputStream(decodeBase64(encoded))).getOrElse("")
- )
- bClient.index(binary) shouldBe true
- bClient.get[Binary](uuid) match {
- case Some(result) =>
- val decoded = decodeBase64(result.content)
- val out = Paths.get(s"/tmp/${path.getFileName}")
- val fos = Files.newOutputStream(out)
- fos.write(decoded)
- fos.close()
- hashFile(out).getOrElse("") shouldBe binary.md5
- case _ => fail("no result found for \"" + uuid + "\"")
- }
- }
- }
-}
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/JestProviders.scala b/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/JestProviders.scala
deleted file mode 100644
index 5e7a58e2..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/elastic/client/JestProviders.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package app.softnetwork.elastic.client
-
-import app.softnetwork.elastic.client.jest.JestProvider
-import app.softnetwork.elastic.model.{Binary, Sample}
-import app.softnetwork.persistence.ManifestWrapper
-import app.softnetwork.persistence.person.model.Person
-import com.typesafe.config.Config
-import io.searchbox.client.JestClient
-
-object JestProviders {
-
- class PersonProvider(es: Config) extends JestProvider[Person] with ManifestWrapper[Person] {
- override protected val manifestWrapper: ManifestW = ManifestW()
-
- override lazy val config: Config = es
-
- implicit lazy val jestClient: JestClient =
- apply(elasticConfig.credentials, elasticConfig.multithreaded)
- }
-
- class SampleProvider(es: Config) extends JestProvider[Sample] with ManifestWrapper[Sample] {
- override protected val manifestWrapper: ManifestW = ManifestW()
-
- override lazy val config: Config = es
-
- implicit lazy val jestClient: JestClient =
- apply(elasticConfig.credentials, elasticConfig.multithreaded)
- }
-
- class BinaryProvider(es: Config) extends JestProvider[Binary] with ManifestWrapper[Binary] {
- override protected val manifestWrapper: ManifestW = ManifestW()
-
- override lazy val config: Config = es
-
- implicit lazy val jestClient: JestClient =
- apply(elasticConfig.credentials, elasticConfig.multithreaded)
- }
-}
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Binary.scala b/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Binary.scala
deleted file mode 100644
index 55e82460..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Binary.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package app.softnetwork.elastic.model
-
-import app.softnetwork.persistence.model.Timestamped
-
-import java.time.Instant
-
-case class Binary(
- uuid: String,
- var createdDate: Instant = Instant.now(),
- var lastUpdated: Instant = Instant.now(),
- content: String,
- md5: String
-) extends Timestamped
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Sample.scala b/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Sample.scala
deleted file mode 100644
index bf9cf5b3..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/elastic/model/Sample.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package app.softnetwork.elastic.model
-
-import app.softnetwork.persistence.model.Timestamped
-
-import java.time.Instant
-
-/** Created by smanciot on 12/04/2020.
- */
-case class Sample(
- uuid: String,
- var createdDate: Instant = Instant.now(),
- var lastUpdated: Instant = Instant.now()
-) extends Timestamped
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/MySQLPersonToElasticHandlerSpec.scala b/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/MySQLPersonToElasticHandlerSpec.scala
deleted file mode 100644
index 1c2c81b4..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/MySQLPersonToElasticHandlerSpec.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package app.softnetwork.persistence.person
-
-import app.softnetwork.elastic.scalatest.EmbeddedElasticTestKit
-
-class MySQLPersonToElasticHandlerSpec
- extends MySQLPersonToElasticTestKit
- with EmbeddedElasticTestKit
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PersonToElasticHandlerSpec.scala b/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PersonToElasticHandlerSpec.scala
deleted file mode 100644
index c7e0a28c..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PersonToElasticHandlerSpec.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package app.softnetwork.persistence.person
-
-import akka.actor.typed.ActorSystem
-import app.softnetwork.elastic.scalatest.EmbeddedElasticTestKit
-import app.softnetwork.persistence.person.query.{
- PersonToElasticProcessorStream,
- PersonToExternalProcessorStream
-}
-import app.softnetwork.persistence.query.{InMemoryJournalProvider, InMemoryOffsetProvider}
-import app.softnetwork.persistence.scalatest.InMemoryPersistenceTestKit
-import com.typesafe.config.Config
-import org.slf4j.{Logger, LoggerFactory}
-
-class PersonToElasticHandlerSpec
- extends PersonToElasticTestKit
- with InMemoryPersistenceTestKit
- with EmbeddedElasticTestKit {
-
- lazy val log: Logger = LoggerFactory getLogger getClass.getName
-
- override def person2ExternalProcessorStream: ActorSystem[_] => PersonToExternalProcessorStream =
- sys => {
- new PersonToElasticProcessorStream with InMemoryJournalProvider with InMemoryOffsetProvider {
- lazy val log: Logger = LoggerFactory getLogger getClass.getName
- override def config: Config = PersonToElasticHandlerSpec.this.elasticConfig
-
- override val forTests: Boolean = true
- override implicit def system: ActorSystem[_] = sys
- }
- }
-}
diff --git a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PostgresPersonToElasticHandlerSpec.scala b/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PostgresPersonToElasticHandlerSpec.scala
deleted file mode 100644
index 647e5bb9..00000000
--- a/elastic/testkit/src/test/scala/app/softnetwork/persistence/person/PostgresPersonToElasticHandlerSpec.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package app.softnetwork.persistence.person
-
-import app.softnetwork.elastic.scalatest.EmbeddedElasticTestKit
-
-class PostgresPersonToElasticHandlerSpec
- extends PostgresPersonToElasticTestKit
- with EmbeddedElasticTestKit
diff --git a/jdbc/build.sbt b/jdbc/build.sbt
index c4e17691..9ca9227d 100644
--- a/jdbc/build.sbt
+++ b/jdbc/build.sbt
@@ -10,7 +10,7 @@ val akkaPersistenceJdbc = Seq(
"com.typesafe.slick" %% "slick" % Versions.slick,
"com.typesafe.slick" %% "slick-hikaricp" % Versions.slick,
"org.postgresql" % "postgresql" % Versions.postgresql,
- "com.mysql" % "mysql-connector-j" % "8.0.33"
+ "com.mysql" % "mysql-connector-j" % Versions.mysql
)
libraryDependencies ++= akkaPersistenceJdbc
diff --git a/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcEventProcessorOffsets.scala b/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcEventProcessorOffsets.scala
index d0c8ed8f..08918b6f 100644
--- a/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcEventProcessorOffsets.scala
+++ b/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcEventProcessorOffsets.scala
@@ -2,14 +2,14 @@ package app.softnetwork.persistence.jdbc.query
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.StrictLogging
-import configs.Configs
+import configs.ConfigReader
case class JdbcEventProcessorOffsets(schema: String, table: String)
object JdbcEventProcessorOffsets extends StrictLogging {
def apply(config: Config): JdbcEventProcessorOffsets = {
- Configs[JdbcEventProcessorOffsets]
- .get(
+ ConfigReader[JdbcEventProcessorOffsets]
+ .read(
config.withFallback(ConfigFactory.load("softnetwork-jdbc-persistence.conf")),
"jdbc-event-processor-offsets"
)
diff --git a/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcStateProvider.scala b/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcStateProvider.scala
index fcd77887..3002a0c6 100644
--- a/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcStateProvider.scala
+++ b/jdbc/src/main/scala/app/softnetwork/persistence/jdbc/query/JdbcStateProvider.scala
@@ -243,7 +243,7 @@ trait JdbcStateProvider[T <: Timestamped]
): Boolean = {
val action =
(states += (uuid, lastUpdated, deleted, state)).map(_ > 0)
- db.run(action) complete () match {
+ db.run(action).complete() match {
case Success(value) =>
log.debug(s"Insert to $tableFullName with $uuid -> $value")
value
@@ -278,7 +278,7 @@ trait JdbcStateProvider[T <: Timestamped]
(uuid, lastUpdated, deleted, state)
)
.map(_ > 0)
- db.run(action) complete () match {
+ db.run(action).complete() match {
case Success(value) =>
if (deleted) {
log.debug(s"Delete from $tableFullName with $uuid -> $value")
@@ -301,7 +301,7 @@ trait JdbcStateProvider[T <: Timestamped]
*/
def destroy(uuid: String): Boolean = {
val action = states.filter(_.uuid === uuid).delete.map(_ > 0)
- db.run(action) complete () match {
+ db.run(action).complete() match {
case Success(value) =>
log.debug(s"Delete from $tableFullName with $uuid -> $value")
value
@@ -321,7 +321,7 @@ trait JdbcStateProvider[T <: Timestamped]
def load(uuid: String): Option[T] = {
implicit val manifest: Manifest[T] = manifestWrapper.wrapped
val action = states.filter(_.uuid === uuid).result.headOption
- db.run(action) complete () match {
+ db.run(action).complete() match {
case Success(value) =>
value match {
case Some(document) =>
@@ -361,7 +361,7 @@ trait JdbcStateProvider[T <: Timestamped]
SELECT state FROM $tableFullName WHERE $query
""".as[String].map(_.toList)
}
- db.run(action) complete () match {
+ db.run(action).complete() match {
case Success(value) =>
log.debug(s"Search $tableFullName with $query -> $value")
value.map(readState)
diff --git a/project/Versions.scala b/project/Versions.scala
index 3b651723..dfeebef2 100644
--- a/project/Versions.scala
+++ b/project/Versions.scala
@@ -1,36 +1,38 @@
object Versions {
- val akka = "2.6.20"
+ val akka = "2.6.20" // TODO 2.6.20 -> 2.8.3
- val akkaHttp = "10.2.10"
+ val akkaHttp = "10.2.10" // TODO 10.2.10 -> 10.5.3
val akkaHttpJson4s = "1.39.2" //1.37.0 -> 1.39.2
- val akkaHttpSession = "0.7.0"
+ val akkaHttpSession = "0.7.1" // 0.7.0 -> 0.7.1
val tapir = "1.7.0"
val tapirHttpSession = "0.2.0"
- val akkaPersistenceJdbc = "5.0.4"
+ val akkaPersistenceJdbc = "5.0.4" // TODO 5.0.4 -> 5.2.1
- val akkaManagement = "1.1.4" // 1.1.4 -> 1.2
+ val akkaManagement = "1.1.4" // TODO 1.1.4 -> 1.4.1
- val postgresql = "42.2.18"
+ val postgresql = "42.2.18" // TODO 42.2.18 -> 42.7.7
- val scalatest = "3.2.16"
+ val mysql = "8.4.0" // 8.0.33 -> 8.4.0
- val typesafeConfig = "1.4.2"
+ val scalatest = "3.2.19" // 3.2.16 -> 3.2.19
- val kxbmap = "0.4.4"
+ val typesafeConfig = "1.4.3"
- val jackson = "2.12.7" // 2.11.4 -> 2.12.7
+ val kxbmap = "0.6.1"
+
+ val jackson = "2.19.0" // 2.12.7 -> 2.19.0
val json4s = "4.0.6" // 3.6.12 -> 4.0.6
val scalaLogging = "3.9.2"
- val logback = "1.2.3"
+ val logback = "1.2.3" // TODO 1.2.3 -> 1.5.6
val slf4j = "1.7.36"
diff --git a/project/plugins.sbt b/project/plugins.sbt
index e69cd1d1..3a932787 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -18,4 +18,4 @@ addDependencyTreePlugin
//addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0")
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.8")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.0")
diff --git a/server/testkit/src/main/scala/akka/http/scaladsl/testkit/PersistenceScalatestRouteTest.scala b/server/testkit/src/main/scala/akka/http/scaladsl/testkit/PersistenceScalatestRouteTest.scala
index 016ed3c6..d1247b0a 100644
--- a/server/testkit/src/main/scala/akka/http/scaladsl/testkit/PersistenceScalatestRouteTest.scala
+++ b/server/testkit/src/main/scala/akka/http/scaladsl/testkit/PersistenceScalatestRouteTest.scala
@@ -19,15 +19,16 @@ import org.scalatest.Suite
import scala.concurrent.ExecutionContextExecutor
/** Created by smanciot on 24/04/2020.
- */
+ */
trait PersistenceScalatestRouteTest
- extends ApiServer
+ extends ApiServer
with ServerTestKit
with PersistenceTestKit
with PersistenceRouteTest
with TestFrameworkInterface
with ScalatestUtils
- with Json4sSupport { this: Suite with ApiRoutes with Schema =>
+ with Json4sSupport {
+ this: Suite with ApiRoutes with Schema =>
override protected def createActorSystem(): ActorSystem = {
typedSystem()
@@ -73,7 +74,7 @@ trait PersistenceScalatestRouteTest
@deprecated("this method has been replaced by findHeader and will be removed", since = "0.3.1.1")
def findCookie(name: String): HttpHeader => Option[HttpCookiePair] = {
case Cookie(cookies) => cookies.find(_.name == name)
- case _ => None
+ case _ => None
}
def extractHeaders(headers: Seq[HttpHeader]): Seq[HttpHeader] = {
@@ -107,15 +108,15 @@ trait PersistenceScalatestRouteTest
}
def headerValue(name: String): HttpHeader => Option[String] = {
- case Cookie(cookies) => cookies.find(_.name == name).map(_.value)
+ case Cookie(cookies) => cookies.find(_.name == name).map(_.value)
case r: RawHeader if r.name == name => Some(r.value)
- case _ => None
+ case _ => None
}
def findHeader(name: String): HttpHeader => Option[HttpHeader] = {
case c: Cookie if c.cookies.exists(_.name == name) => Some(c)
- case other if other.name() == name => Some(other)
- case _ => None
+ case other if other.name() == name => Some(other)
+ case _ => None
}
def existHeader(name: String): HttpHeader => Boolean = header =>
@@ -123,7 +124,7 @@ trait PersistenceScalatestRouteTest
}
trait InMemoryPersistenceScalatestRouteTest
- extends PersistenceScalatestRouteTest
+ extends PersistenceScalatestRouteTest
with InMemoryPersistenceTestKit {
_: Suite with ApiRoutes =>
}
@@ -132,7 +133,7 @@ import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.model.HttpEntity.ChunkStreamPart
import akka.http.scaladsl.model._
-import akka.http.scaladsl.model.headers.{ Host, Upgrade, `Sec-WebSocket-Protocol` }
+import akka.http.scaladsl.model.headers.{Host, Upgrade, `Sec-WebSocket-Protocol`}
import akka.http.scaladsl.server._
import akka.http.scaladsl.settings.ParserSettings
import akka.http.scaladsl.settings.RoutingSettings
@@ -142,11 +143,11 @@ import akka.http.scaladsl.util.FastFuture._
import akka.stream.scaladsl.Source
import akka.testkit.TestKit
import akka.util.ConstantFun
-import com.typesafe.config.{ Config, ConfigFactory }
+import com.typesafe.config.{Config, ConfigFactory}
import scala.collection.immutable
import scala.concurrent.duration._
-import scala.concurrent.{ Await, ExecutionContext, Future }
+import scala.concurrent.{Await, ExecutionContext, Future}
import scala.reflect.ClassTag
import scala.util.DynamicVariable
@@ -164,11 +165,13 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
.filter(_ != '$')
def testConfigSource: String = ""
+
def testConfig: Config = {
val source = testConfigSource
val config = if (source.isEmpty) ConfigFactory.empty() else ConfigFactory.parseString(source)
config.withFallback(ConfigFactory.load())
}
+
implicit lazy val system: ActorSystem = createActorSystem()
implicit lazy val executor: ExecutionContextExecutor = system.dispatcher
implicit lazy val materializer: Materializer = SystemMaterializer(system).materializer
@@ -176,6 +179,7 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
def cleanUp(): Unit = TestKit.shutdownActorSystem(system)
private val dynRR = new DynamicVariable[RouteTestResult](null)
+
private def result =
if (dynRR.value ne null) dynRR.value
else sys.error("This value is only available inside of a `check` construct!")
@@ -185,38 +189,57 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
private def responseSafe = if (dynRR.value ne null) dynRR.value.response else ""
def handled: Boolean = result.handled
+
def response: HttpResponse = result.response
+
def responseEntity: HttpEntity = result.entity
+
private def rawResponse: HttpResponse = result.rawResponse
+
def chunks: immutable.Seq[HttpEntity.ChunkStreamPart] = result.chunks
+
def chunksStream: Source[ChunkStreamPart, Any] = result.chunksStream
- def entityAs[T: FromEntityUnmarshaller: ClassTag](implicit timeout: Duration = 1.second): T = {
+
+ def entityAs[T: FromEntityUnmarshaller : ClassTag](implicit timeout: Duration = 1.second): T = {
def msg(e: Throwable) = s"Could not unmarshal entity to type '${implicitly[ClassTag[T]]}' for `entityAs` assertion: $e\n\nResponse was: $responseSafe"
+
Await.result(Unmarshal(responseEntity).to[T].fast.recover[T] { case error => failTest(msg(error)) }, timeout)
}
- def responseAs[T: FromResponseUnmarshaller: ClassTag](implicit timeout: Duration = 1.second): T = {
+
+ def responseAs[T: FromResponseUnmarshaller : ClassTag](implicit timeout: Duration = 1.second): T = {
def msg(e: Throwable) = s"Could not unmarshal response to type '${implicitly[ClassTag[T]]}' for `responseAs` assertion: $e\n\nResponse was: $responseSafe"
+
Await.result(Unmarshal(response).to[T].fast.recover[T] { case error => failTest(msg(error)) }, timeout)
}
+
def contentType: ContentType = rawResponse.entity.contentType
+
def mediaType: MediaType = contentType.mediaType
+
def charsetOption: Option[HttpCharset] = contentType.charsetOption
+
def charset: HttpCharset = charsetOption getOrElse sys.error("Binary entity does not have charset")
+
def headers: immutable.Seq[HttpHeader] = rawResponse.headers
- def header[T >: Null <: HttpHeader: ClassTag]: Option[T] = rawResponse.header[T](implicitly[ClassTag[T]])
+
+ def header[T >: Null <: HttpHeader : ClassTag]: Option[T] = rawResponse.header[T](implicitly[ClassTag[T]])
+
def header(name: String): Option[HttpHeader] = rawResponse.headers.find(_.is(name.toLowerCase))
+
def status: StatusCode = rawResponse.status
def closingExtension: String = chunks.lastOption match {
case Some(HttpEntity.LastChunk(extension, _)) => extension
- case _ => ""
+ case _ => ""
}
+
def trailer: immutable.Seq[HttpHeader] = chunks.lastOption match {
case Some(HttpEntity.LastChunk(_, trailer)) => trailer
- case _ => Nil
+ case _ => Nil
}
def rejections: immutable.Seq[Rejection] = result.rejections
+
def rejection: Rejection = {
val r = rejections
if (r.size == 1) r.head else failTest("Expected a single rejection but got %s (%s)".format(r.size, r))
@@ -265,21 +288,27 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
abstract class TildeArrow[A, B] {
type Out
+
def apply(request: HttpRequest, f: A => B): Out
}
case class DefaultHostInfo(host: Host, securedConnection: Boolean)
+
object DefaultHostInfo {
implicit def defaultHost: DefaultHostInfo = DefaultHostInfo(Host("example.com"), securedConnection = false)
}
+
object TildeArrow {
implicit object InjectIntoRequestTransformer extends TildeArrow[HttpRequest, HttpRequest] {
type Out = HttpRequest
+
def apply(request: HttpRequest, f: HttpRequest => HttpRequest) = f(request)
}
- implicit def injectIntoRoute(implicit timeout: RouteTestTimeout, defaultHostInfo: DefaultHostInfo): TildeArrow[RequestContext, Future[RouteResult]] { type Out = RouteTestResult } =
+
+ implicit def injectIntoRoute(implicit timeout: RouteTestTimeout, defaultHostInfo: DefaultHostInfo): TildeArrow[RequestContext, Future[RouteResult]] {type Out = RouteTestResult} =
new TildeArrow[RequestContext, Future[RouteResult]] {
type Out = RouteTestResult
+
def apply(request: HttpRequest, route: Route): Out = {
if (request.method == HttpMethods.HEAD && ServerSettings(system).transparentHeadRequests)
failTest("`akka.http.server.transparent-head-requests = on` not supported in PersistenceRouteTest using `~>`. Use `~!>` instead " +
@@ -310,13 +339,15 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
abstract class TildeBangArrow[A, B] {
type Out
+
def apply(request: HttpRequest, f: A => B): Out
}
object TildeBangArrow {
- implicit def injectIntoRoute(implicit timeout: RouteTestTimeout, serverSettings: ServerSettings): TildeBangArrow[RequestContext, Future[RouteResult]] { type Out = RouteTestResult } =
+ implicit def injectIntoRoute(implicit timeout: RouteTestTimeout, serverSettings: ServerSettings): TildeBangArrow[RequestContext, Future[RouteResult]] {type Out = RouteTestResult} =
new TildeBangArrow[RequestContext, Future[RouteResult]] {
type Out = RouteTestResult
+
def apply(request: HttpRequest, route: Route): Out = {
val routeTestResult = new RouteTestResult(timeout.duration)
val responseF = PersistenceRouteTest.runRouteClientServer(request, route, serverSettings)
@@ -327,6 +358,7 @@ trait PersistenceRouteTest extends RequestBuilding with WSTestRequestBuilding wi
}
}
}
+
private[http] object PersistenceRouteTest {
def runRouteClientServer(request: HttpRequest, route: Route, serverSettings: ServerSettings)(implicit system: ActorSystem): Future[HttpResponse] = {
import system.dispatcher
diff --git a/session/common/build.sbt b/session/common/build.sbt
index 2f9393f5..7bd023c6 100644
--- a/session/common/build.sbt
+++ b/session/common/build.sbt
@@ -13,7 +13,7 @@ val akkaHttpSession: Seq[ModuleID] = Seq(
)
libraryDependencies ++= Seq(
- "app.softnetwork.protobuf" %% "scalapb-extensions" % "0.1.7"
+ "app.softnetwork.protobuf" %% "scalapb-extensions" % "0.2.0"
) ++ akkaHttpSession ++ tapirHttpSession
Compile / unmanagedResourceDirectories += baseDirectory.value / "src/main/protobuf"
diff --git a/session/common/src/main/scala/app/softnetwork/session/model/JwtClaimsEncoder.scala b/session/common/src/main/scala/app/softnetwork/session/model/JwtClaimsEncoder.scala
index d94951d8..fc3c39ce 100644
--- a/session/common/src/main/scala/app/softnetwork/session/model/JwtClaimsEncoder.scala
+++ b/session/common/src/main/scala/app/softnetwork/session/model/JwtClaimsEncoder.scala
@@ -31,7 +31,7 @@ trait JwtClaimsEncoder extends SessionEncoder[JwtClaims] with Completion {
)
(updatedJwtClaims.iss match {
case Some(iss) =>
- (loadApiKey(iss) complete ()).toOption.flatten
+ loadApiKey(iss).complete().toOption.flatten
case _ => None
}) match {
case Some(apiKey) if apiKey.clientSecret.isDefined =>
@@ -51,7 +51,7 @@ trait JwtClaimsEncoder extends SessionEncoder[JwtClaims] with Completion {
else jwtClaims.iss
val innerConfig = (maybeClientId match {
case Some(clientId) =>
- (loadApiKey(clientId) complete ()).toOption.flatten.flatMap(_.clientSecret)
+ loadApiKey(clientId).complete().toOption.flatten.flatMap(_.clientSecret)
case _ => None
}) match {
case Some(clientSecret) =>
diff --git a/session/testkit/src/main/scala/app/softnetwork/session/scalatest/RefreshableSessionTestKit.scala b/session/testkit/src/main/scala/app/softnetwork/session/scalatest/RefreshableSessionTestKit.scala
index 5e86844b..26362a90 100644
--- a/session/testkit/src/main/scala/app/softnetwork/session/scalatest/RefreshableSessionTestKit.scala
+++ b/session/testkit/src/main/scala/app/softnetwork/session/scalatest/RefreshableSessionTestKit.scala
@@ -19,7 +19,7 @@ trait RefreshableSessionTestKit[T <: SessionData with SessionDataDecorator[T]]
value match {
case Some(value) =>
refreshable.refreshTokenManager
- .sessionFromValue(value) complete () match {
+ .sessionFromValue(value).complete() match {
case Success(value) =>
value match {
case _ @SessionResult.CreatedFromToken(session) => Some(session)
diff --git a/session/testkit/src/main/scala/app/softnetwork/session/scalatest/SessionTestKit.scala b/session/testkit/src/main/scala/app/softnetwork/session/scalatest/SessionTestKit.scala
index 6c6d9376..ff01ac6a 100644
--- a/session/testkit/src/main/scala/app/softnetwork/session/scalatest/SessionTestKit.scala
+++ b/session/testkit/src/main/scala/app/softnetwork/session/scalatest/SessionTestKit.scala
@@ -48,7 +48,7 @@ trait SessionTestKit[T <: SessionData with SessionDataDecorator[T]]
}
lines += "***** End Client Headers *****"
log.info(lines)
- request.withHeaders(request.headers ++ clientHeaders: _*)
+ request.withHeaders(request.headers ++ clientHeaders)
}
def createSession(