From 8d3e7de1c4ab80b84d8d7c1dce46c29913f1bd20 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sun, 20 May 2018 20:09:46 +0100 Subject: [PATCH 01/30] Adding support for multi arg aggregate calls --- .../builder/clauses/QueryCondition.scala | 15 +++++++++++++++ .../phantom/builder/query/SelectQuery.scala | 17 +++++++++++++++++ .../db/specialized/SelectFunctionsTesting.scala | 14 ++++++++++++++ 3 files changed, 46 insertions(+) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala index 03c5917fa..8947308f3 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala @@ -124,6 +124,21 @@ object OperatorClause extends Clause { object TypedClause extends Clause { class Condition[RR](override val qb: CQLQuery, val extractor: Row => RR) extends QueryCondition(qb, Nil) + + abstract class TypedProjection[HL](queries: List[CQLQuery]) extends QueryCondition[HNil]( + QueryBuilder.Utils.join(queries: _*), + Nil + ) { + def extractor: Row => HL + } + + object TypedProjection { + implicit def condition2[A1, A2](source: (Condition[A1], Condition[A2])): TypedProjection[(A1, A2)] = { + new TypedProjection[(A1, A2)](List(source._1.qb, source._2.qb)) { + override def extractor: Row => (A1, A2) = r => source._1.extractor(r) -> source._2.extractor(r) + } + } + } } object DeleteClause extends Clause { diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala index ac96aed3d..2fbd07208 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala @@ -235,6 +235,23 @@ 
private[phantom] class RootSelectBlock[ } } + def function[RR](f1: T => TypedClause.TypedProjection[RR])( + implicit keySpace: KeySpace + ): SelectQuery.Default[T, RR] = { + new SelectQuery( + table, + f1(table).extractor, + QueryBuilder.Select.select(table.tableName, keySpace.name, f1(table).qb), + Nil, + WherePart.empty, + OrderPart.empty, + LimitedPart.empty, + FilteringPart.empty, + UsingPart.empty, + count = false + ) + } + def function[RR](f1: TypedClause.Condition[RR])( implicit keySpace: KeySpace ): SelectQuery.Default[T, RR] = { diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index a2d3220f0..7ec8534ca 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -433,4 +433,18 @@ class SelectFunctionsTesting extends PhantomSuite { res shouldBe defined } } + + it should "retrieve the average of a Long field from Cassandra" in { + val record = gen[PrimitiveRecord] + + val chain = for { + _ <- database.primitives.store(record).future() + res <- database.primitives.select.function(t => avg(t.long) -> max(t.long)).where(_.pkey eqs record.pkey).aggregate() + } yield res + + whenReady(chain) { res => + res shouldBe defined + } + } + } From 0f4be622c55e038e5db64bef4f4e7ccf26962b35 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sun, 20 May 2018 20:11:10 +0100 Subject: [PATCH 02/30] Using tuple implicits --- .../builder/clauses/QueryCondition.scala | 7 +++++++ .../phantom/builder/query/SelectQuery.scala | 17 ----------------- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala 
b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala index 8947308f3..0a948fd9e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala @@ -133,6 +133,13 @@ object TypedClause extends Clause { } object TypedProjection { + + implicit def conditoin1[A1](source: Condition[A1]): TypedProjection[A1] = { + new TypedProjection[A1](List(source.qb)) { + override def extractor: Row => A1 = r => source.extractor(r) + } + } + implicit def condition2[A1, A2](source: (Condition[A1], Condition[A2])): TypedProjection[(A1, A2)] = { new TypedProjection[(A1, A2)](List(source._1.qb, source._2.qb)) { override def extractor: Row => (A1, A2) = r => source._1.extractor(r) -> source._2.extractor(r) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala index 2fbd07208..d4b2ed3ad 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala @@ -269,23 +269,6 @@ private[phantom] class RootSelectBlock[ ) } - def function[RR](f1: T => TypedClause.Condition[RR])( - implicit keySpace: KeySpace - ): SelectQuery.Default[T, RR] = { - new SelectQuery( - table, - f1(table).extractor, - QueryBuilder.Select.select(table.tableName, keySpace.name, f1(table).qb), - Nil, - WherePart.empty, - OrderPart.empty, - LimitedPart.empty, - FilteringPart.empty, - UsingPart.empty, - count = false - ) - } - @implicitNotFound("You haven't provided a KeySpace in scope. 
Use a Connector to automatically inject one.") def count()( implicit keySpace: KeySpace, From 6f785e6a8f68c8cf6d2a41fbc3cf8d4ae71328fd Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 21 May 2018 11:07:47 +0100 Subject: [PATCH 03/30] Adding ~ combinators for multi-argument selectors --- .../builder/clauses/QueryCondition.scala | 32 +++++++++++-------- .../phantom/builder/query/SelectQuery.scala | 18 +++++++---- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala index 0a948fd9e..070163ebf 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/clauses/QueryCondition.scala @@ -123,26 +123,30 @@ object OperatorClause extends Clause { } object TypedClause extends Clause { - class Condition[RR](override val qb: CQLQuery, val extractor: Row => RR) extends QueryCondition(qb, Nil) + class Condition[RR](override val qb: CQLQuery, val extractor: Row => RR) extends QueryCondition(qb, Nil) { outer => - abstract class TypedProjection[HL](queries: List[CQLQuery]) extends QueryCondition[HNil]( - QueryBuilder.Utils.join(queries: _*), - Nil - ) { - def extractor: Row => HL + def ~[BB](other: Condition[BB]): TypedProjection[BB :: RR :: HNil] = new TypedProjection[BB :: RR :: HNil](List(qb, other.qb)) { + override def extractor: Row => BB :: RR :: HNil = r => { + other.extractor(r) :: outer.extractor(r) :: HNil + } + } } - object TypedProjection { + abstract class TypedProjection[HL <: HList](queries: List[CQLQuery]) extends QueryCondition[HNil]( + QueryBuilder.Utils.join(queries.reverse: _*), + Nil + ) { outer => + def extractor: Row => HL - implicit def conditoin1[A1](source: Condition[A1]): TypedProjection[A1] = { - new TypedProjection[A1](List(source.qb)) { - override def 
extractor: Row => A1 = r => source.extractor(r) - } + def ~[RR](other: Condition[RR]): TypedProjection[RR :: HL] = new TypedProjection[RR :: HL](other.qb :: queries) { + override def extractor: Row => RR :: HL = r => other.extractor(r) :: outer.extractor(r) } + } - implicit def condition2[A1, A2](source: (Condition[A1], Condition[A2])): TypedProjection[(A1, A2)] = { - new TypedProjection[(A1, A2)](List(source._1.qb, source._2.qb)) { - override def extractor: Row => (A1, A2) = r => source._1.extractor(r) -> source._2.extractor(r) + object TypedProjection { + implicit def condition1[A1](source: Condition[A1]): TypedProjection[A1 :: HNil] = { + new TypedProjection[A1 :: HNil](List(source.qb)) { + override def extractor: Row => A1 :: HNil = r => source.extractor(r) :: HNil } } } diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala index d4b2ed3ad..512a0f4e2 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala @@ -28,8 +28,8 @@ import com.outworkers.phantom.builder.syntax.CQLSyntax import com.outworkers.phantom.builder.{ConsistencyBound, LimitBound, OrderBound, WhereBound, _} import com.outworkers.phantom.connectors.KeySpace import com.outworkers.phantom.{CassandraTable, Row} -import shapeless.ops.hlist.{Prepend, Reverse} -import shapeless.{::, =:!=, HList, HNil} +import shapeless.ops.hlist.{Prepend, Reverse, Tupler} +import shapeless.{::, =:!=, Generic, HList, HNil} import scala.annotation.implicitNotFound import scala.concurrent.ExecutionContextExecutor @@ -235,13 +235,17 @@ private[phantom] class RootSelectBlock[ } } - def function[RR](f1: T => TypedClause.TypedProjection[RR])( - implicit keySpace: KeySpace - ): SelectQuery.Default[T, RR] = { + def function[ + HL <: HList, + TP + ](projection: T => 
TypedClause.TypedProjection[HL])( + implicit keySpace: KeySpace, + ev: Tupler.Aux[HL, TP] + ): SelectQuery.Default[T, TP] = { new SelectQuery( table, - f1(table).extractor, - QueryBuilder.Select.select(table.tableName, keySpace.name, f1(table).qb), + row => ev.apply(projection(table).extractor(row)), + QueryBuilder.Select.select(table.tableName, keySpace.name, projection(table).qb), Nil, WherePart.empty, OrderPart.empty, From 84dbc22334a9a46bc112303680d4227c2e867f90 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 21 May 2018 11:13:12 +0100 Subject: [PATCH 04/30] Adding multi aggregate functions. --- .../phantom/builder/query/SelectQuery.scala | 6 ++++-- .../query/execution/ResultQueryInterface.scala | 9 ++++++--- .../outworkers/phantom/ops/SelectQueryOps.scala | 2 +- .../db/specialized/SelectFunctionsTesting.scala | 16 +++++++++++++++- 4 files changed, 26 insertions(+), 7 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala index 512a0f4e2..3aa93440e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala @@ -237,14 +237,16 @@ private[phantom] class RootSelectBlock[ def function[ HL <: HList, + Rev <: HList, TP ](projection: T => TypedClause.TypedProjection[HL])( implicit keySpace: KeySpace, - ev: Tupler.Aux[HL, TP] + rev: Reverse.Aux[HL, Rev], + ev: Tupler.Aux[Rev, TP] ): SelectQuery.Default[T, TP] = { new SelectQuery( table, - row => ev.apply(projection(table).extractor(row)), + row => ev.apply(rev.apply(projection(table).extractor(row))), QueryBuilder.Select.select(table.tableName, keySpace.name, projection(table).qb), Nil, WherePart.empty, diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala 
b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala index b925d405f..0622f1dc9 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala @@ -37,8 +37,11 @@ abstract class ResultQueryInterface[ protected[this] def flattenedOption[Inner]( row: Option[Row] - )(implicit ev: R <:< Option[Inner]): Option[Inner] = { - row flatMap fromRow + )(implicit ev: R <:< Tuple1[Option[Inner]]): Option[Inner] = { + row match { + case Some(r) => fromRow(r)._1 + case None => None + } } protected[this] def directMapper( @@ -59,7 +62,7 @@ abstract class ResultQueryInterface[ private[phantom] def optionalFetch[Inner](source: F[ResultSet])( implicit ec: ExecutionContextExecutor, - ev: R <:< Option[Inner] + ev: R <:< Tuple1[Option[Inner]] ): F[Option[Inner]] = { source map { res => flattenedOption(res.value()) } } diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index a7683f94b..cdd071904 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -73,7 +73,7 @@ class SelectQueryOps[ def aggregate[Inner]()( implicit session: Session, ev: Limit =:= Unlimited, - opt: Record <:< Option[Inner], + opt: Record <:< Tuple1[Option[Inner]], ec: ExecutionContextExecutor ): F[Option[Inner]] = { val enforceLimit = if (query.count) LimitedPart.empty else query.limitedPart append QueryBuilder.limit(1.toString) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index 7ec8534ca..68e22438d 100644 
--- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -434,12 +434,26 @@ class SelectFunctionsTesting extends PhantomSuite { } } + it should "retrieve the result of multiple aggregate operators" in { + val record = gen[PrimitiveRecord] + + val chain = for { + _ <- database.primitives.store(record).future() + res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)).where(_.pkey eqs record.pkey).aggregate() + } yield res + + whenReady(chain) { res => + res shouldBe defined + } + } + + it should "retrieve the average of a Long field from Cassandra" in { val record = gen[PrimitiveRecord] val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => avg(t.long) -> max(t.long)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)).where(_.pkey eqs record.pkey).aggregate() } yield res whenReady(chain) { res => From d70120503c55ef60bb7f0d4a8995bc84f796eff4 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 21 May 2018 11:46:38 +0100 Subject: [PATCH 05/30] Spacing things out --- .../builder/query/db/specialized/SelectFunctionsTesting.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index 68e22438d..12623a654 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -453,7 +453,9 @@ class SelectFunctionsTesting extends PhantomSuite { val 
chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)) + .where(_.pkey eqs record.pkey) + .one() } yield res whenReady(chain) { res => From d98c69a9d383cb15dcea1429e91334d3a7e60944 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 21 May 2018 13:03:42 +0100 Subject: [PATCH 06/30] Adding an HList extractor for nested options --- .../com/outworkers/phantom/SelectTable.scala | 9 +++- .../phantom/ops/AggregateSequence.scala | 46 +++++++++++++++++++ .../specialized/SelectFunctionsTesting.scala | 5 +- 3 files changed, 58 insertions(+), 2 deletions(-) create mode 100644 phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala index f64df5949..684fa2d0d 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala @@ -15,13 +15,20 @@ */ package com.outworkers.phantom +import com.outworkers.phantom.builder.QueryBuilder +import com.outworkers.phantom.builder.clauses.TypedClause import com.outworkers.phantom.builder.ops.SelectColumn import com.outworkers.phantom.builder.query._ +import com.outworkers.phantom.connectors.KeySpace +import shapeless.HList +import shapeless.ops.hlist.{Reverse, Tupler} trait SelectTable[T <: CassandraTable[T, R], R] { self: CassandraTable[T, R] => - def select: RootSelectBlock[T, R] = RootSelectBlock[T, R](this.asInstanceOf[T], Nil, fromRow) + def table: T = this.asInstanceOf[T] + + def select: RootSelectBlock[T, R] = RootSelectBlock[T, R](table, Nil, fromRow) def select[A](f1: T => SelectColumn[A]): RootSelectBlock[T, A] = { val t = this.asInstanceOf[T] diff --git 
a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala new file mode 100644 index 000000000..9a9884bd1 --- /dev/null +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala @@ -0,0 +1,46 @@ +/* + * Copyright 2013 - 2017 Outworkers Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.outworkers.phantom.ops + +import shapeless._ +import shapeless.ops.tuple.Prepend + +/* +trait AggregateSequence[L <: HList] extends DepFn1[L] + +object AggregateSequence { + type Aux[L <: HList, Out0] = AggregateSequence[L] { type Out = Out0 } + + implicit def hnilAggregateSequence: Aux[HNil, Option[Unit]] = + new AggregateSequence[HNil] { + type Out = Option[Unit] + def apply(l: HNil): Option[Unit] = Some(()) + } + + implicit def hconsAggregateSequence[H, T <: HList, OutT]( + implicit fst: Aux[T, Option[OutT]], + pre: Prepend[Tuple1[H], OutT] + ): Aux[Option[H] :: T, Option[pre.Out]] = new AggregateSequence[Option[H] :: T] { + type Out = Option[pre.Out] + + def apply(l: Option[H] :: T): Option[pre.Out] = { + l.head.flatMap(fst(l.tail)).map { + case (h, t) => pre(Tuple1(h), t) + } + } + } + +}*/ \ No newline at end of file diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala 
b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index 12623a654..ea88adcd3 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -439,7 +439,10 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives.select + .function(t => avg(t.long) ~ max(t.long)) + .where(_.pkey eqs record.pkey) + .aggregate() } yield res whenReady(chain) { res => From a0ee196a44c2044d4f526f168349155a61c7fb16 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 21 May 2018 13:25:38 +0100 Subject: [PATCH 07/30] removing some more unused imports --- .../outworkers/phantom/builder/primitives/Primitive.scala | 1 - .../com/outworkers/phantom/builder/query/SelectQuery.scala | 4 +--- .../query/db/specialized/SelectFunctionsTesting.scala | 6 ++++-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitive.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitive.scala index a2c87c49b..84ae0109a 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitive.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitive.scala @@ -558,7 +558,6 @@ object Primitive { */ implicit def materializer[T]: Primitive[T] = macro PrimitiveMacro.materializer[T] - def iso[A, B : Primitive](r: B => A)(w: A => B): Primitive[A] = derive[A, B](w)(r) /** diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala 
b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala index 3aa93440e..ecebaced0 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/SelectQuery.scala @@ -15,8 +15,6 @@ */ package com.outworkers.phantom.builder.query -import java.nio.ByteBuffer - import com.datastax.driver.core.{ConsistencyLevel, Session} import com.outworkers.phantom.builder.clauses._ import com.outworkers.phantom.builder.ops.TokenizerKey @@ -29,7 +27,7 @@ import com.outworkers.phantom.builder.{ConsistencyBound, LimitBound, OrderBound, import com.outworkers.phantom.connectors.KeySpace import com.outworkers.phantom.{CassandraTable, Row} import shapeless.ops.hlist.{Prepend, Reverse, Tupler} -import shapeless.{::, =:!=, Generic, HList, HNil} +import shapeless.{::, =:!=, HList, HNil} import scala.annotation.implicitNotFound import scala.concurrent.ExecutionContextExecutor diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index ea88adcd3..db86a317f 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -38,8 +38,10 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.recipes.store(record).future() - timestamp <- database.recipes.select.function(t => writetime(t.description)) - .where(_.url eqs record.url).one() + timestamp <- database.recipes.select + .function(t => writetime(t.description)) + .where(_.url eqs record.url) + .aggregate() } yield timestamp whenReady(chain) { res => From 6180e8c6332ed716b6e747ce6b609a7da660ae2d Mon Sep 17 00:00:00 2001 From: Flavian 
Alexandru Date: Wed, 23 May 2018 15:15:48 +0100 Subject: [PATCH 08/30] Adding logging for when routing key functions are applied --- .../phantom/builder/ops/PartitionQueryColumn.scala | 10 ++++++++-- .../outworkers/phantom/builder/ops/TokenOps.scala | 12 ++++++++++++ .../com/outworkers/phantom/builder/ops/package.scala | 4 +++- .../phantom/builder/query/QueryOptions.scala | 8 +++++++- .../query/prepared/PreparedInsertQueryTest.scala | 1 - .../phantom/finagle/SelectFunctionsTesting.scala | 3 ++- 6 files changed, 32 insertions(+), 6 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/PartitionQueryColumn.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/PartitionQueryColumn.scala index 8fd7cb42d..1162f9a22 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/PartitionQueryColumn.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/PartitionQueryColumn.scala @@ -40,7 +40,10 @@ case class PartitionQueryColumn[RR](name: String)( )(fn: (String, String) => CQLQuery)(implicit pp: Primitive[R]): WhereClause.PartitionCondition = { new WhereClause.PartitionCondition( fn(name, pp.asCql(value)), { - session: Session => pp.serialize(value, session.protocolVersion) + session: Session => RoutingKeyValue( + cql = pp.asCql(value), + bytes = pp.serialize(value, session.protocolVersion) + ) } ) } @@ -106,7 +109,10 @@ case class PartitionQueryColumn[RR](name: String)( ): WhereClause.PartitionCondition = { new WhereClause.PartitionCondition( QueryBuilder.Where.in(name, values.map(p.asCql)), { - session: Session => ev.serialize(ListValue(values), session.protocolVersion) + session: Session => RoutingKeyValue( + s"List(${QueryBuilder.Utils.join(values.map(p.asCql)).queryString}", + ev.serialize(ListValue(values), session.protocolVersion) + ) } ) } diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/TokenOps.scala 
b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/TokenOps.scala index d4b9b5390..50f632e93 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/TokenOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/TokenOps.scala @@ -15,11 +15,23 @@ */ package com.outworkers.phantom.builder.ops +import java.nio.ByteBuffer + import com.outworkers.phantom.builder.primitives.Primitive import com.outworkers.phantom.builder.query.prepared.PrepareMark import com.outworkers.phantom.column.AbstractColumn import shapeless._ +/** + * + * @param cql + * @param bytes + */ +case class RoutingKeyValue( + cql: String, + bytes: ByteBuffer +) + object TokenTypes { sealed trait Root trait ValueToken extends Root diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/package.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/package.scala index b0ab51e7b..8995ec19e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/package.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/package.scala @@ -20,5 +20,7 @@ import java.nio.ByteBuffer import com.datastax.driver.core.Session package object ops { - type TokenizerKey = (Session => ByteBuffer) + + + type TokenizerKey = (Session => RoutingKeyValue) } diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/QueryOptions.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/QueryOptions.scala index 1240efadb..c4275674a 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/QueryOptions.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/QueryOptions.scala @@ -17,6 +17,7 @@ package com.outworkers.phantom.builder.query import com.datastax.driver.core._ import com.datastax.driver.core.policies.TokenAwarePolicy +import com.outworkers.phantom.Manager import com.outworkers.phantom.builder.ops.TokenizerKey trait Modifier extends 
(Statement => Statement) @@ -31,8 +32,13 @@ case class RoutingKeyModifier( val policy = session.getCluster.getConfiguration.getPolicies.getLoadBalancingPolicy if (policy.isInstanceOf[TokenAwarePolicy] && tokens.nonEmpty) { + + val routingKeys = tokens.map(_.apply(session)) + + Manager.logger.debug(s"Routing key tokens found. Setting routing key to ${routingKeys.map(_.cql).mkString("(", ",", ")")}") + st - .setRoutingKey(tokens.map(_.apply(session)): _*) + .setRoutingKey(routingKeys.map(_.bytes):_*) .setKeyspace(session.getLoggedKeyspace) } else { st diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/prepared/PreparedInsertQueryTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/prepared/PreparedInsertQueryTest.scala index 114116247..b9d8dace5 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/prepared/PreparedInsertQueryTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/prepared/PreparedInsertQueryTest.scala @@ -22,7 +22,6 @@ import com.outworkers.phantom.tables.{DerivedRecord, PrimitiveCassandra22, Primi import com.outworkers.util.samplers._ import io.circe.generic.auto._ import io.circe.syntax._ -import io.circe.{Encoder, Json} class PreparedInsertQueryTest extends PhantomSuite { diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala index ff501e586..6462de934 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala @@ -38,7 +38,8 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { _ <- database.recipes.store(record).future() timestamp <- database.recipes.select .function(t => writetime(t.description)) - .where(_.url eqs record.url).one() + .where(_.url
eqs record.url) + .one() } yield timestamp whenReady(chain) { res => From b6b41eb8ccc3cc4bee774f29acf19ba919c3faa1 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Fri, 25 May 2018 10:16:06 +0100 Subject: [PATCH 09/30] Correcting some typos --- readme/src/main/tut/querying/select.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/readme/src/main/tut/querying/select.md b/readme/src/main/tut/querying/select.md index 5bf169491..381a3799c 100644 --- a/readme/src/main/tut/querying/select.md +++ b/readme/src/main/tut/querying/select.md @@ -12,9 +12,9 @@ and demonstrate the available select API. We will create a `AnalyticsEntries` table, where we hold information about a car's state over time for 2 properties we care about, namely the `velocity` and `tirePressure`. We leverage the `TimeUUID` Cassandra type to store information -about timestamps, order the logs we receive in descending order(most recent record first), and prevent any colissions. +about timestamps, order the logs we receive in descending order (most recent record first), and prevent any collisions. -If we would just use a timestamp type, if we were to receive two logs for the same car at the exact same timestamp, +If we would just use the normal timestamp type, if we were to receive two logs for the same car at the exact same timestamp, the entries would override each other in Cassandra, because in effect they would have the same partition key and the same clustering key, so the whole primary key would be identical. @@ -192,7 +192,7 @@ The average of a `Float` column will come back as `scala.Float` and so on. To take advantage of these operators, simply use the default import, combined with the `function` argument and the `aggregate` function. A few examples are found in [SelectFunctionsTesting.scala](/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala#L99).
-The structure of an aggregation query is simple, and the rturn type is +The structure of an aggregation query is simple, and the return type is ```scala database.primitives.select.function(t => sum(t.long)).where(_.pkey eqs record.pkey).aggregate() From dfb1142158bb4cc3d071727efe6fbc75eadb574f Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Fri, 25 May 2018 11:51:43 +0100 Subject: [PATCH 10/30] Adding documentation for the cluster builder --- .../serializers/KeySpaceQuerySerializer.scala | 4 + .../phantom/readme/ConnectorExample2.scala | 38 +++++ readme/src/main/tut/basics/connectors.md | 136 ++++++++++++++++++ 3 files changed, 178 insertions(+) create mode 100644 readme/src/main/scala/com/outworkers/phantom/readme/ConnectorExample2.scala diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala index abeca8d7c..0e448570a 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala @@ -122,6 +122,10 @@ sealed class KeySpaceSerializer( val qb: CQLQuery = CQLQuery.empty ) extends KeySpaceCQLQuery { + def option(clause: BuilderClause): KeySpaceSerializer = { + new KeySpaceSerializer(keySpace, QueryBuilder.Alter.option(qb, clause.qb)) + } + def `with`(clause: BuilderClause): KeySpaceSerializer = { new KeySpaceSerializer(keySpace, QueryBuilder.Alter.option(qb, clause.qb)) } diff --git a/readme/src/main/scala/com/outworkers/phantom/readme/ConnectorExample2.scala b/readme/src/main/scala/com/outworkers/phantom/readme/ConnectorExample2.scala new file mode 100644 index 000000000..601b77875 --- /dev/null +++ b/readme/src/main/scala/com/outworkers/phantom/readme/ConnectorExample2.scala @@ -0,0 +1,38 @@ +/* + * Copyright 2013 - 2017 Outworkers Ltd. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.outworkers.phantom.readme + + +import com.datastax.driver.core.{PlainTextAuthProvider, SocketOptions} +import com.outworkers.phantom.dsl._ + +object ConnectorExample2 { + + val default: CassandraConnection = ContactPoint.local + .withClusterBuilder( + _.withSocketOptions( + new SocketOptions() + .setConnectTimeoutMillis(20000) + .setReadTimeoutMillis(20000) + ).withAuthProvider( + new PlainTextAuthProvider("username", "password") + ) + ).keySpace( + KeySpace("phantom").ifNotExists().`with`( + replication eqs SimpleStrategy.replication_factor(1) + ) + ) +} \ No newline at end of file diff --git a/readme/src/main/tut/basics/connectors.md b/readme/src/main/tut/basics/connectors.md index e69de29bb..791c86cf4 100644 --- a/readme/src/main/tut/basics/connectors.md +++ b/readme/src/main/tut/basics/connectors.md @@ -0,0 +1,136 @@ +phantom +[![Build Status](https://travis-ci.org/outworkers/phantom.svg?branch=develop)](https://travis-ci.org/outworkers/phantom?branch=develop) [![Coverage Status](https://coveralls.io/repos/github/outworkers/phantom/badge.svg?branch=develop)](https://coveralls.io/github/outworkers/phantom?branch=develop) [![Codacy Rating](https://api.codacy.com/project/badge/grade/25bee222a7d142ff8151e6ceb39151b4)](https://www.codacy.com/app/flavian/phantom_2) [![Maven 
Central](https://maven-badges.herokuapp.com/maven-central/com.outworkers/phantom-dsl_2.11/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.outworkers/phantom-dsl_2.11) [![Bintray](https://api.bintray.com/packages/outworkers/oss-releases/phantom-dsl/images/download.svg) ](https://bintray.com/outworkers/oss-releases/phantom-dsl/_latestVersion) [![ScalaDoc](http://javadoc-badge.appspot.com/com.outworkers/phantom-dsl_2.11.svg?label=scaladoc)](http://javadoc-badge.appspot.com/com.outworkers/phantom-dsl_2.11) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/outworkers/phantom?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +=============================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================== + + +Connectors are a thin abstraction layer around a construct native to the Datastax Java Driver, and provide a way to implicitly "inject" a `Session` +where it is required. 
On top of that, they also allow specifying all the settings you might care about when connecting to Cassandra, such as heartbeat intervals, +pooling options and so on. + + +The options available are better described on the official [ClusterBuilder documentation](https://docs.datastax.com/en/drivers/java/3.1/com/datastax/driver/core/Cluster.Builder.html), but +the same will be available in phantom, and the `ContactPoint` implementation will simply leverage the `ClusterBuilder` API under the hood. + + +An example of how to build a connection is found below, and uses `ContactPoint.local`, which is just a convenience method that's meant to +use a connection to `localhost` on port `9042`, the standard CQL port. There's also `ContactPoint.embedded`, which works together with +the SBT cassandra plugins we offer, `phantom-sbt` and `phantom-docker`, and will attempt to connect to `localhost:9142`. + +```tut:silent + +import com.datastax.driver.core.SocketOptions +import com.outworkers.phantom.dsl._ + +object ConnectorExample { + + val default: CassandraConnection = ContactPoint.local + .withClusterBuilder(_.withSocketOptions( + new SocketOptions() + .setConnectTimeoutMillis(20000) + .setReadTimeoutMillis(20000) + ) + ).noHeartbeat().keySpace( + KeySpace("phantom").ifNotExists().`with`( + replication eqs SimpleStrategy.replication_factor(1) + ) + ) +} + +``` + +To take advantage of the native Java Driver options, simply use the `withClusterBuilder` method. In the example below, +we are going use password protected authentication to Cassandra, using `PlainTextAuthProvider`. 
Hopefully it is +easy to see how you would + +```tut:silent + +import com.datastax.driver.core.{PlainTextAuthProvider, SocketOptions} +import com.outworkers.phantom.dsl._ + +object ConnectorExample2 { + + val default: CassandraConnection = ContactPoint.local + .withClusterBuilder( + _.withSocketOptions( + new SocketOptions() + .setConnectTimeoutMillis(20000) + .setReadTimeoutMillis(20000) + ).withAuthProvider( + new PlainTextAuthProvider("username", "password") + ) + ).keySpace( + KeySpace("phantom").ifNotExists().`with`( + replication eqs SimpleStrategy.replication_factor(1) + ) + ) +} + +``` + + +### Keyspace options + +There is a second set of options you can control via the `ContactPoint`, and these relate to the CQL query used to create +the keyspace. Not everyone chooses to initialise Cassandra keyspaces with phantom, but it's a useful bit of kit to have +for your development environment. + +Your keyspace creation query is passed through to initialised session, and together with phantom's database automated +creation functionality, you can use phantom to initialise both keyspaces and tables inside it on the fly, not to mention +indexes and UDT types(phantom pro only). + + +To build a keyspace query, use `with` and `and` to chain options on the query. It's important to note they require +a special assigning operator, namely `eqs` instead of `=`, just like phantom queries. This is to prevent a potentially +confusing overload of standard operators. + +*Note*: Using the `with` operator required backticks ``` `with` ```, as the keyword `with` is also a native Scala keyword +used for trait mixins. We preserve the name to match the CQL syntax, but if you would like to avoid that, use `option` instead. + +The below examples will produce the same output CQL query. 
+ +#### Using the ```scala `with` ``` keyword + +```tut:silent + +object KeySpaceQueryWith { + val query = KeySpace("phantom").ifNotExists() + .`with`(replication eqs SimpleStrategy.replication_factor(1)) + .and(durable_writes eqs true) +} + +``` + +#### Using the ```scala option ``` keyword + +```tut:silent + +object KeySpaceQueryOption { + val query = KeySpace("phantom").ifNotExists() + .otion(replication eqs SimpleStrategy.replication_factor(1)) + .and(durable_writes eqs true) +} + +``` + + +#### Keyspace query options + +Using the DSL, you can configure three main things: + +- Replication strategy +- Topology strategy +- Durable writes(true/false) + +More advanced options are also supported, such as `NetworkTopologyStrategy`. + +```tut:silent + +object NetworkTopologyExample { + + val query = KeySpace("phantom").ifNotExists() + .`with`(replication eqs NetworkTopologyStrategy + .data_center("data1", 2) + .data_center("data2", 3) + ).and(durable_writes eqs true) +} +``` From 83cb71b38eef9798804c7ce13e01367a86173b26 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Fri, 25 May 2018 11:52:03 +0100 Subject: [PATCH 11/30] Better docs --- readme/src/main/tut/basics/connectors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme/src/main/tut/basics/connectors.md b/readme/src/main/tut/basics/connectors.md index 791c86cf4..4428b9824 100644 --- a/readme/src/main/tut/basics/connectors.md +++ b/readme/src/main/tut/basics/connectors.md @@ -113,7 +113,7 @@ object KeySpaceQueryOption { ``` -#### Keyspace query options +#### Keyspace configuration options Using the DSL, you can configure three main things: From 4eea4681daaf2809592d86c191419d836a377980 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Fri, 25 May 2018 11:53:09 +0100 Subject: [PATCH 12/30] oops --- readme/src/main/tut/basics/connectors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme/src/main/tut/basics/connectors.md 
b/readme/src/main/tut/basics/connectors.md index 4428b9824..9c3e728bd 100644 --- a/readme/src/main/tut/basics/connectors.md +++ b/readme/src/main/tut/basics/connectors.md @@ -106,7 +106,7 @@ object KeySpaceQueryWith { object KeySpaceQueryOption { val query = KeySpace("phantom").ifNotExists() - .otion(replication eqs SimpleStrategy.replication_factor(1)) + .option(replication eqs SimpleStrategy.replication_factor(1)) .and(durable_writes eqs true) } From 2e1e7d1ca2de04857cd80e7478904a16dfdf7adf Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Fri, 13 Jul 2018 17:24:37 +0100 Subject: [PATCH 13/30] fixing some unused imports and fixing tests --- .../scala/com/outworkers/phantom/SelectTable.scala | 5 ----- .../phantom/builder/primitives/Primitives.scala | 11 ----------- .../builder/serializers/KeySpaceQuerySerializer.scala | 4 ---- .../com/outworkers/phantom/ops/SelectQueryOps.scala | 7 +++---- .../query/db/specialized/SelectFunctionsTesting.scala | 4 ++-- .../query/db/specialized/StaticColumnTest.scala | 2 -- .../phantom/finagle/SelectFunctionsTesting.scala | 4 ++-- .../prepared/BatchablePreparedInsertQueryTest.scala | 4 ++-- 8 files changed, 9 insertions(+), 32 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala index 684fa2d0d..967737d3f 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/SelectTable.scala @@ -15,13 +15,8 @@ */ package com.outworkers.phantom -import com.outworkers.phantom.builder.QueryBuilder -import com.outworkers.phantom.builder.clauses.TypedClause import com.outworkers.phantom.builder.ops.SelectColumn import com.outworkers.phantom.builder.query._ -import com.outworkers.phantom.connectors.KeySpace -import shapeless.HList -import shapeless.ops.hlist.{Reverse, Tupler} trait SelectTable[T <: CassandraTable[T, R], R] { self: CassandraTable[T, R] => diff 
--git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala index f3d385300..22479d838 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala @@ -15,24 +15,13 @@ */ package com.outworkers.phantom.builder.primitives -import java.math.BigInteger -import java.net.{InetAddress, UnknownHostException} -import java.nio.charset.Charset import java.nio.{BufferUnderflowException, ByteBuffer} -import java.sql.{ Timestamp => JTimestamp } -import java.time.Instant -import java.util.{Date, UUID} import com.datastax.driver.core._ import com.datastax.driver.core.exceptions.{DriverInternalError, InvalidTypeException} -import com.datastax.driver.core.utils.Bytes import com.outworkers.phantom.builder.QueryBuilder -import com.outworkers.phantom.builder.query.engine.CQLQuery -import com.outworkers.phantom.builder.syntax.CQLSyntax -import org.joda.time.{DateTime, DateTimeZone, LocalDate => JodaLocalDate} import scala.collection.generic.CanBuildFrom -import scala.util.Try object Utils { private[phantom] def unsupported(version: ProtocolVersion): DriverInternalError = { diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala index 0fc337f09..707c74c71 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/serializers/KeySpaceQuerySerializer.scala @@ -122,10 +122,6 @@ sealed class KeySpaceSerializer( val qb: CQLQuery = CQLQuery.empty ) extends KeySpaceCQLQuery { - def option(clause: BuilderClause): KeySpaceSerializer = { - new KeySpaceSerializer(keySpace, 
QueryBuilder.Alter.option(qb, clause.qb)) - } - def `with`(clause: BuilderClause): KeySpaceSerializer = { new KeySpaceSerializer(keySpace, QueryBuilder.Alter.option(qb, clause.qb)) } diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index cdd071904..30f6165d6 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -70,14 +70,13 @@ class SelectQueryOps[ * @return A Scala future guaranteed to contain a single result wrapped as an Option. */ @implicitNotFound("You have already defined limit on this Query. You cannot specify multiple limits on the same builder.") - def aggregate[Inner]()( + def aggregate()( implicit session: Session, ev: Limit =:= Unlimited, - opt: Record <:< Tuple1[Option[Inner]], ec: ExecutionContextExecutor - ): F[Option[Inner]] = { + ): F[Option[Record]] = { val enforceLimit = if (query.count) LimitedPart.empty else query.limitedPart append QueryBuilder.limit(1.toString) - optionalFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())) + singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())) } override def fromRow(r: Row): Record = query.fromRow(r) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index db86a317f..09613dc17 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -46,7 +46,7 @@ class SelectFunctionsTesting extends PhantomSuite { whenReady(chain) { res => res shouldBe 
defined - Try(new DateTime(res.value / 1000, DateTimeZone.UTC)).isSuccess shouldEqual true + Try(new DateTime(res.value._1 / 1000, DateTimeZone.UTC)).isSuccess shouldEqual true } } @@ -94,7 +94,7 @@ class SelectFunctionsTesting extends PhantomSuite { whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value.value) + potentialList should contain (res.value._1.value) } } diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/StaticColumnTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/StaticColumnTest.scala index 2c8e27b0c..615de28b8 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/StaticColumnTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/StaticColumnTest.scala @@ -74,8 +74,6 @@ class StaticColumnTest extends PhantomSuite { it should "append a value to a static list and share the update among records" in { val id = gen[UUID] - val helper = TableHelper[StaticCollectionTable, StaticCollectionRecord] - val sample = gen[StaticCollectionRecord].copy(id = id) val sample2 = gen[StaticCollectionRecord].copy(id = id, list = sample.list) diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala index 6462de934..bd1cf4842 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala @@ -44,7 +44,7 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { whenReady(chain) { res => res shouldBe defined - Try(new DateTime(res.value / 1000, DateTimeZone.UTC)).isReturn shouldEqual true + Try(new DateTime(res.value._1 / 1000, DateTimeZone.UTC)).isReturn shouldEqual true } } @@ -92,7 +92,7 @@ 
class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value.value) + potentialList should contain (res.value._1.value) } } diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/BatchablePreparedInsertQueryTest.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/BatchablePreparedInsertQueryTest.scala index b4c844709..62aca58ea 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/BatchablePreparedInsertQueryTest.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/BatchablePreparedInsertQueryTest.scala @@ -18,14 +18,14 @@ package com.outworkers.phantom.finagle.query.prepared import com.outworkers.phantom.PhantomSuite import com.outworkers.phantom.builder.query.prepared.ExecutablePreparedQuery import com.outworkers.phantom.finagle._ -import com.outworkers.phantom.tables.{Recipe, TestDatabase} +import com.outworkers.phantom.tables.Recipe import com.outworkers.util.samplers._ class BatchablePreparedInsertQueryTest extends PhantomSuite with TwitterFutures { override def beforeAll(): Unit = { super.beforeAll() - TestDatabase.recipes.createSchema() + val _ = database.recipes.createSchema() } it should "serialize an prepared batch query" in { From 9a21ba3369e1fac23c695cad47aa302b0d32773e Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sat, 14 Jul 2018 02:31:32 +0100 Subject: [PATCH 14/30] Oops --- .../builder/query/db/specialized/SelectFunctionsTesting.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index 09613dc17..6ed083946 100644 --- 
a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -453,7 +453,7 @@ class SelectFunctionsTesting extends PhantomSuite { } - it should "retrieve the average of a Long field from Cassandra" in { + it should "retrieve the average and the maximum of a Long field from Cassandra" in { val record = gen[PrimitiveRecord] val chain = for { From 54517407a421d4e2a71a7f316e203d7d066e8804 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sun, 15 Jul 2018 11:23:42 +0300 Subject: [PATCH 15/30] Removing the need to have a Tuple1 extractor manually defined with the new agg method --- .../phantom/ops/SelectQueryOps.scala | 18 ++++++++++++++++++ .../com/outworkers/phantom/PhantomSuite.scala | 4 ++-- .../specialized/SelectFunctionsTesting.scala | 8 ++++---- .../finagle/SelectFunctionsTesting.scala | 8 ++++---- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index 30f6165d6..28df4e7ff 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -62,6 +62,24 @@ class SelectQueryOps[ singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())) } + /** + * Returns the first row from the select ignoring everything else + * @param session The implicit session provided by a [[com.outworkers.phantom.connectors.Connector]]. + * @param ev The implicit limit for the query. + * @param ec The implicit Scala execution context. + * @return A Scala future guaranteed to contain a single result wrapped as an Option. + */ + @implicitNotFound("You have already defined limit on this Query. 
You cannot specify multiple limits on the same builder.") + def agg[T]()( + implicit session: Session, + ev: Limit =:= Unlimited, + ec: ExecutionContextExecutor, + unwrap: Record <:< Tuple1[T] + ): F[Option[T]] = { + val enforceLimit = if (query.count) LimitedPart.empty else query.limitedPart append QueryBuilder.limit(1.toString) + singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())).map(optRec => optRec.map(_._1)) + } + /** * Returns the first row from the select ignoring everything else * @param session The implicit session provided by a [[com.outworkers.phantom.connectors.Connector]]. diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/PhantomSuite.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/PhantomSuite.scala index 72a3323e7..45852e47b 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/PhantomSuite.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/PhantomSuite.scala @@ -19,7 +19,7 @@ import java.util.concurrent.TimeUnit import com.datastax.driver.core.VersionNumber import com.outworkers.phantom.database.DatabaseProvider -import com.outworkers.phantom.dsl.{DateTime, UUID} +import com.outworkers.phantom.dsl.UUID import com.outworkers.phantom.tables.TestDatabase import com.outworkers.util.samplers._ import io.circe.{Encoder, Json} @@ -60,7 +60,7 @@ trait PhantomBaseSuite extends Suite with Matchers override def sample: LocalDate = LocalDate.now(DateTimeZone.UTC) } - override implicit val patienceConfig = PatienceConfig( + override implicit val patienceConfig: PatienceConfig = PatienceConfig( timeout = defaultTimeoutSpan, interval = Span(defaultScalaInterval, Millis) ) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index 6ed083946..f7bc3b458 100644 --- 
a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -41,12 +41,12 @@ class SelectFunctionsTesting extends PhantomSuite { timestamp <- database.recipes.select .function(t => writetime(t.description)) .where(_.url eqs record.url) - .aggregate() + .agg() } yield timestamp whenReady(chain) { res => res shouldBe defined - Try(new DateTime(res.value._1 / 1000, DateTimeZone.UTC)).isSuccess shouldEqual true + Try(new DateTime(res.value / 1000, DateTimeZone.UTC)).isSuccess shouldEqual true } } @@ -89,12 +89,12 @@ class SelectFunctionsTesting extends PhantomSuite { timestamp <- database.timeuuidTable.select.function(t => ttl(t.name)) .where(_.user eqs record.user) .and(_.id eqs record.id) - .one() + .agg() } yield timestamp whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value._1.value) + potentialList should contain (res.value) } } diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala index bd1cf4842..aa579b253 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala @@ -39,12 +39,12 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { timestamp <- database.recipes.select .function(t => writetime(t.description)) .where(_.url eqs record.url) - .one() + .agg() } yield timestamp whenReady(chain) { res => res shouldBe defined - Try(new DateTime(res.value._1 / 1000, DateTimeZone.UTC)).isReturn shouldEqual true + Try(new DateTime(res.value / 1000, DateTimeZone.UTC)).isReturn shouldEqual true } } @@ -87,12 +87,12 @@ class SelectFunctionsTesting extends PhantomSuite with 
TwitterFutures { timestamp <- database.timeuuidTable.select.function(t => ttl(t.name)) .where(_.user eqs record.user) .and(_.id eqs record.id) - .one() + .agg() } yield timestamp whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value._1.value) + potentialList should contain (res.value) } } From 19c66f1e32ac5f0c1a524df5cc1a65c672970544 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sun, 15 Jul 2018 11:29:26 +0300 Subject: [PATCH 16/30] Removing more warnings --- .../builder/query/db/specialized/JodaDateTimeColumnTest.scala | 2 +- .../finagle/query/prepared/PreparedUpdateQueryTest.scala | 2 +- .../phantom/thrift/tests/tjson/suites/ThriftMapColumnTest.scala | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/JodaDateTimeColumnTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/JodaDateTimeColumnTest.scala index bd983369d..6fd3b6202 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/JodaDateTimeColumnTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/JodaDateTimeColumnTest.scala @@ -24,7 +24,7 @@ class JodaDateTimeColumnTest extends PhantomSuite { override def beforeAll(): Unit = { super.beforeAll() - database.primitivesJoda.createSchema() + val _ = database.primitivesJoda.createSchema() } it should "correctly insert and extract a JodaTime date" in { diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/PreparedUpdateQueryTest.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/PreparedUpdateQueryTest.scala index f28e20b75..5f946a675 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/PreparedUpdateQueryTest.scala +++ 
b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/PreparedUpdateQueryTest.scala @@ -24,7 +24,7 @@ class PreparedUpdateQueryTest extends PhantomSuite with TwitterFutures { override def beforeAll(): Unit = { super.beforeAll() - database.recipes.createSchema() + val _ = database.recipes.createSchema() } it should "execute a prepared update query with a single argument bind" in { diff --git a/phantom-thrift/src/test/scala/com/outworkers/phantom/thrift/tests/tjson/suites/ThriftMapColumnTest.scala b/phantom-thrift/src/test/scala/com/outworkers/phantom/thrift/tests/tjson/suites/ThriftMapColumnTest.scala index 21cb54bf0..c2f2e3ce4 100644 --- a/phantom-thrift/src/test/scala/com/outworkers/phantom/thrift/tests/tjson/suites/ThriftMapColumnTest.scala +++ b/phantom-thrift/src/test/scala/com/outworkers/phantom/thrift/tests/tjson/suites/ThriftMapColumnTest.scala @@ -18,9 +18,7 @@ package com.outworkers.phantom.thrift.tests.tjson.suites import com.outworkers.phantom.finagle._ import com.outworkers.phantom.thrift.tests.ThriftRecord import com.outworkers.phantom.thrift.tests.tjson.TJsonSuite -import com.outworkers.phantom.thrift.util.ThriftTestSuite import com.outworkers.util.samplers._ -import org.scalatest.FlatSpec class ThriftMapColumnTest extends TJsonSuite { From f6f178d6214efd77d8bc1a6af6d8508cb5a9826b Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 16 Jul 2018 13:54:10 +0300 Subject: [PATCH 17/30] Adding a .value call --- .../builder/query/db/specialized/SelectFunctionsTesting.scala | 2 +- .../com/outworkers/phantom/finagle/SelectFunctionsTesting.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index f7bc3b458..f9e641d4a 100644 --- 
a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -94,7 +94,7 @@ class SelectFunctionsTesting extends PhantomSuite { whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value) + potentialList should contain (res.value.value) } } diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala index aa579b253..40379351d 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala @@ -92,7 +92,7 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { whenReady(chain) { res => res shouldBe defined - potentialList should contain (res.value) + potentialList should contain (res.value.value) } } From 14fc6f887f7c5d673ca3bf5d4dc49fd4ffd90dcf Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Mon, 16 Jul 2018 16:44:08 +0300 Subject: [PATCH 18/30] Removing unecessary code. 
--- .../query/execution/ResultQueryInterface.scala | 16 ---------------- .../outworkers/phantom/ops/SelectQueryOps.scala | 2 +- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala index 0622f1dc9..dae018961 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/execution/ResultQueryInterface.scala @@ -35,15 +35,6 @@ abstract class ResultQueryInterface[ row map fromRow } - protected[this] def flattenedOption[Inner]( - row: Option[Row] - )(implicit ev: R <:< Tuple1[Option[Inner]]): Option[Inner] = { - row match { - case Some(r) => fromRow(r)._1 - case None => None - } - } - protected[this] def directMapper( results: Iterator[Row] ): List[R] = results.map(fromRow).toList @@ -60,13 +51,6 @@ abstract class ResultQueryInterface[ f map { r => IteratorResult(r.iterate().map(fromRow), r) } } - private[phantom] def optionalFetch[Inner](source: F[ResultSet])( - implicit ec: ExecutionContextExecutor, - ev: R <:< Tuple1[Option[Inner]] - ): F[Option[Inner]] = { - source map { res => flattenedOption(res.value()) } - } - private[phantom] def singleFetch(source: F[ResultSet])( implicit session: Session, ec: ExecutionContextExecutor diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index 28df4e7ff..045e6d8b8 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -77,7 +77,7 @@ class SelectQueryOps[ unwrap: Record <:< Tuple1[T] ): F[Option[T]] = { val enforceLimit = if (query.count) LimitedPart.empty else 
query.limitedPart append QueryBuilder.limit(1.toString) - singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())).map(optRec => optRec.map(_._1)) + singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())).map(_.map(_._1)) } /** From 410b740e1944059bb6196d229f58d88fdb1197bf Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Tue, 17 Jul 2018 15:17:04 +0100 Subject: [PATCH 19/30] Using a different function. --- .../phantom/ops/SelectQueryOps.scala | 4 +- .../specialized/SelectFunctionsTesting.scala | 42 +++++++++++++------ .../finagle/SelectFunctionsTesting.scala | 4 +- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index 045e6d8b8..bcabe4df7 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -70,7 +70,7 @@ class SelectQueryOps[ * @return A Scala future guaranteed to contain a single result wrapped as an Option. */ @implicitNotFound("You have already defined limit on this Query. You cannot specify multiple limits on the same builder.") - def agg[T]()( + def aggregate[T]()( implicit session: Session, ev: Limit =:= Unlimited, ec: ExecutionContextExecutor, @@ -88,7 +88,7 @@ class SelectQueryOps[ * @return A Scala future guaranteed to contain a single result wrapped as an Option. */ @implicitNotFound("You have already defined limit on this Query. 
You cannot specify multiple limits on the same builder.") - def aggregate()( + def multiAggregate()( implicit session: Session, ev: Limit =:= Unlimited, ec: ExecutionContextExecutor diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala index f9e641d4a..9a84faf8b 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala @@ -41,7 +41,7 @@ class SelectFunctionsTesting extends PhantomSuite { timestamp <- database.recipes.select .function(t => writetime(t.description)) .where(_.url eqs record.url) - .agg() + .aggregate() } yield timestamp whenReady(chain) { res => @@ -55,8 +55,10 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.timeuuidTable.store(record).future() - timestamp <- database.timeuuidTable.select.function(t => dateOf(t.id)).where(_.user eqs record.user) - .and(_.id eqs record.id).one() + timestamp <- database.timeuuidTable.select.function(t => dateOf(t.id)) + .where(_.user eqs record.user) + .and(_.id eqs record.id) + .aggregate() } yield timestamp whenReady(chain) { res => @@ -69,8 +71,12 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.timeuuidTable.store(record).future() - timestamp <- database.timeuuidTable.select.function(t => unixTimestampOf(t.id)).where(_.user eqs record.user) - .and(_.id eqs record.id).one() + timestamp <- database.timeuuidTable + .select + .function(t => unixTimestampOf(t.id)) + .where(_.user eqs record.user) + .and(_.id eqs record.id) + .aggregate() } yield timestamp whenReady(chain) { res => @@ -89,7 +95,7 @@ class SelectFunctionsTesting extends PhantomSuite { timestamp <- 
database.timeuuidTable.select.function(t => ttl(t.name)) .where(_.user eqs record.user) .and(_.id eqs record.id) - .agg() + .aggregate() } yield timestamp whenReady(chain) { res => @@ -104,7 +110,11 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => sum(t.int)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives + .select + .function(t => sum(t.int)) + .where(_.pkey eqs record.pkey) + .aggregate() } yield res whenReady(chain) { res => @@ -130,7 +140,11 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => sum(t.float)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives + .select + .function(t => sum(t.float)) + .where(_.pkey eqs record.pkey) + .aggregate() } yield res whenReady(chain) { res => @@ -144,7 +158,11 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(t => sum(t.bDecimal)).where(_.pkey eqs record.pkey).aggregate() + res <- database.primitives + .select + .function(t => sum(t.bDecimal)) + .where(_.pkey eqs record.pkey) + .aggregate() } yield res whenReady(chain) { res => @@ -428,7 +446,7 @@ class SelectFunctionsTesting extends PhantomSuite { val chain = for { _ <- database.primitives.store(record).future() - res <- database.primitives.select.function(count()).aggregate() + res <- database.primitives.select.function(count()).one() } yield res whenReady(chain) { res => @@ -444,7 +462,7 @@ class SelectFunctionsTesting extends PhantomSuite { res <- database.primitives.select .function(t => avg(t.long) ~ max(t.long)) .where(_.pkey eqs record.pkey) - .aggregate() + .multiAggregate() } yield res whenReady(chain) { res => @@ -460,7 +478,7 @@ class SelectFunctionsTesting extends 
PhantomSuite { _ <- database.primitives.store(record).future() res <- database.primitives.select.function(t => avg(t.long) ~ max(t.long)) .where(_.pkey eqs record.pkey) - .one() + .multiAggregate() } yield res whenReady(chain) { res => diff --git a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala index 40379351d..4584c36d4 100644 --- a/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala +++ b/phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/SelectFunctionsTesting.scala @@ -39,7 +39,7 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { timestamp <- database.recipes.select .function(t => writetime(t.description)) .where(_.url eqs record.url) - .agg() + .aggregate() } yield timestamp whenReady(chain) { res => @@ -87,7 +87,7 @@ class SelectFunctionsTesting extends PhantomSuite with TwitterFutures { timestamp <- database.timeuuidTable.select.function(t => ttl(t.name)) .where(_.user eqs record.user) .and(_.id eqs record.id) - .agg() + .aggregate() } yield timestamp whenReady(chain) { res => From aa45e65ccef3416ce078b6ed2d7d4a243f6820f9 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Tue, 17 Jul 2018 20:00:12 +0100 Subject: [PATCH 20/30] Updating the documentation. 
--- .../scala/com/outworkers/phantom/ops/SelectQueryOps.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index bcabe4df7..641faa3c9 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -63,13 +63,15 @@ class SelectQueryOps[ } /** - * Returns the first row from the select ignoring everything else + * Returns the result of an aggregate function call, provided a single aggregate function was invoked. + * This is used to circumvent some compiler limitations around HLists being tupled. Phantom relies on HLists + * to compute a multiple aggregate return function extractor, and if a single aggregate is selected, + * a Tuple1(value) is returned. This function will extract the content of the Tuple1 to have a more presentable type. * @param session The implicit session provided by a [[com.outworkers.phantom.connectors.Connector]]. * @param ev The implicit limit for the query. * @param ec The implicit Scala execution context. * @return A Scala future guaranteed to contain a single result wrapped as an Option. */ - @implicitNotFound("You have already defined limit on this Query. 
You cannot specify multiple limits on the same builder.") def aggregate[T]()( implicit session: Session, ev: Limit =:= Unlimited, From 99f0ef85fc04e7d7016b1248cd278f8d7b0d3199 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Wed, 18 Jul 2018 08:13:43 +0100 Subject: [PATCH 21/30] Bumping Scala version to match .travis.yml --- .travis.yml | 6 +++--- build.sbt | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index d6c5204e6..a3c86dd50 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ sudo: required dist: trusty scala: - 2.10.6 -- 2.11.11 +- 2.11.12 python: 2.7.13 cache: directories: @@ -15,7 +15,7 @@ before_cache: - find $HOME/.sbt -name "*.lock" -delete env: global: - - TARGET_SCALA_VERSION: 2.12.5 + - TARGET_SCALA_VERSION: 2.12.6 - GH_REF: github.com/outworkers/phantom.git - secure: V5iziDRj988+kcpW6PHOjZZYoayDi2+Fjx2Y6F9dL2mYw3kcjrwyyQgpWoMPMrXHdR61xoollyytgZPfavNViocNxYZMVRfQBLeTCd+mvuLQEvra6aRWl7XaYlpGi5+uHEh5k84MsRNsEZKiiuabxMRZvglZSC8QHYqYgDx3rho= - secure: nslC+pNpj8XnEnolwAhfVMP0j/mNnlMm9MCqD3IWiRlh5RRgt6t5s1XCSF6y9y/kOB4p0ny3ly7qR4uZxtKvVnJzjrrpf5UAlSpFjA+s7jMgumQWuUsDm6u3uP5DykTWNwa8xpRT7J2vcCM/MoP1DSwuHQ7ptO8yFfVlel3LFtY= @@ -35,7 +35,7 @@ jdk: - oraclejdk8 matrix: include: - - scala: 2.12.5 + - scala: 2.12.6 jdk: oraclejdk8 addons: apt: diff --git a/build.sbt b/build.sbt index 7248d9919..98b05fdbc 100644 --- a/build.sbt +++ b/build.sbt @@ -105,8 +105,8 @@ lazy val Versions = new { val circe = "0.8.0" val scala210 = "2.10.6" - val scala211 = "2.11.11" - val scala212 = "2.12.5" + val scala211 = "2.11.12" + val scala212 = "2.12.6" val scalaAll = Seq(scala210, scala211, scala212) val scala = new { From a7ce23ed834fe00925d71755df4b0531af1e4a0c Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Wed, 18 Jul 2018 13:16:34 +0100 Subject: [PATCH 22/30] Revert "Bumping Scala version to match .travis.yml" This reverts commit 99f0ef85fc04e7d7016b1248cd278f8d7b0d3199. 
--- .travis.yml | 6 +++--- build.sbt | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index a3c86dd50..d6c5204e6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ sudo: required dist: trusty scala: - 2.10.6 -- 2.11.12 +- 2.11.11 python: 2.7.13 cache: directories: @@ -15,7 +15,7 @@ before_cache: - find $HOME/.sbt -name "*.lock" -delete env: global: - - TARGET_SCALA_VERSION: 2.12.6 + - TARGET_SCALA_VERSION: 2.12.5 - GH_REF: github.com/outworkers/phantom.git - secure: V5iziDRj988+kcpW6PHOjZZYoayDi2+Fjx2Y6F9dL2mYw3kcjrwyyQgpWoMPMrXHdR61xoollyytgZPfavNViocNxYZMVRfQBLeTCd+mvuLQEvra6aRWl7XaYlpGi5+uHEh5k84MsRNsEZKiiuabxMRZvglZSC8QHYqYgDx3rho= - secure: nslC+pNpj8XnEnolwAhfVMP0j/mNnlMm9MCqD3IWiRlh5RRgt6t5s1XCSF6y9y/kOB4p0ny3ly7qR4uZxtKvVnJzjrrpf5UAlSpFjA+s7jMgumQWuUsDm6u3uP5DykTWNwa8xpRT7J2vcCM/MoP1DSwuHQ7ptO8yFfVlel3LFtY= @@ -35,7 +35,7 @@ jdk: - oraclejdk8 matrix: include: - - scala: 2.12.6 + - scala: 2.12.5 jdk: oraclejdk8 addons: apt: diff --git a/build.sbt b/build.sbt index 98b05fdbc..7248d9919 100644 --- a/build.sbt +++ b/build.sbt @@ -105,8 +105,8 @@ lazy val Versions = new { val circe = "0.8.0" val scala210 = "2.10.6" - val scala211 = "2.11.12" - val scala212 = "2.12.6" + val scala211 = "2.11.11" + val scala212 = "2.12.5" val scalaAll = Seq(scala210, scala211, scala212) val scala = new { From 1d6e76fae5b68c89338c93275801cf08c8044b9b Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Wed, 18 Jul 2018 14:40:21 +0100 Subject: [PATCH 23/30] Upgrading JDK versions --- .travis.yml | 14 +++++--------- build.sbt | 4 ++-- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index d6c5204e6..f1b6023cc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ sudo: required dist: trusty scala: - 2.10.6 -- 2.11.11 +- 2.11.12 python: 2.7.13 cache: directories: @@ -15,7 +15,7 @@ before_cache: - find $HOME/.sbt -name "*.lock" -delete env: global: - - TARGET_SCALA_VERSION: 2.12.5 + - 
TARGET_SCALA_VERSION: 2.12.6 - GH_REF: github.com/outworkers/phantom.git - secure: V5iziDRj988+kcpW6PHOjZZYoayDi2+Fjx2Y6F9dL2mYw3kcjrwyyQgpWoMPMrXHdR61xoollyytgZPfavNViocNxYZMVRfQBLeTCd+mvuLQEvra6aRWl7XaYlpGi5+uHEh5k84MsRNsEZKiiuabxMRZvglZSC8QHYqYgDx3rho= - secure: nslC+pNpj8XnEnolwAhfVMP0j/mNnlMm9MCqD3IWiRlh5RRgt6t5s1XCSF6y9y/kOB4p0ny3ly7qR4uZxtKvVnJzjrrpf5UAlSpFjA+s7jMgumQWuUsDm6u3uP5DykTWNwa8xpRT7J2vcCM/MoP1DSwuHQ7ptO8yFfVlel3LFtY= @@ -32,15 +32,11 @@ branches: - master - develop jdk: -- oraclejdk8 +- oraclejdk9 matrix: include: - - scala: 2.12.5 - jdk: oraclejdk8 -addons: - apt: - packages: - - oracle-java8-installer + - scala: 2.12.6 + jdk: oraclejdk9 before_install: unset SBT_OPTS JVM_OPTS install: - ./build/install_cassandra.sh diff --git a/build.sbt b/build.sbt index 7248d9919..98b05fdbc 100644 --- a/build.sbt +++ b/build.sbt @@ -105,8 +105,8 @@ lazy val Versions = new { val circe = "0.8.0" val scala210 = "2.10.6" - val scala211 = "2.11.11" - val scala212 = "2.12.5" + val scala211 = "2.11.12" + val scala212 = "2.12.6" val scalaAll = Seq(scala210, scala211, scala212) val scala = new { From 343eb73115f265d6923d3a0579fd36960b4c4051 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Wed, 18 Jul 2018 18:58:36 +0100 Subject: [PATCH 24/30] Adding OracleJDK8 instead of 9 --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index f1b6023cc..09350820f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,11 +32,11 @@ branches: - master - develop jdk: -- oraclejdk9 +- oraclejdk8 matrix: include: - scala: 2.12.6 - jdk: oraclejdk9 + jdk: oraclejdk8 before_install: unset SBT_OPTS JVM_OPTS install: - ./build/install_cassandra.sh From 1410d3e6faa1fd57eac3acaf0a1d393ee919ee14 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 14:28:10 +0100 Subject: [PATCH 25/30] Removing unecessary code. 
--- .../outworkers/phantom/builder/primitives/Primitives.scala | 2 +- .../src/main/scala/com/outworkers/phantom/ops/DbOps.scala | 2 -- .../scala/com/outworkers/phantom/ops/SelectQueryOps.scala | 6 ++---- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala index 22479d838..cdb756f2e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/primitives/Primitives.scala @@ -82,7 +82,7 @@ object Utils { } /** - * Utility method that "packs" together a list of {@link ByteBuffer}s containing + * Utility method that "packs" together a list of {{java.nio.ByteBuffer}}s containing * serialized collection elements. * Mainly intended for use with collection codecs when serializing collections. * diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/DbOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/DbOps.scala index dbf38cee8..7f7d2eb42 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/DbOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/DbOps.scala @@ -15,9 +15,7 @@ */ package com.outworkers.phantom.ops -import com.datastax.driver.core.Session import com.outworkers.phantom.ResultSet -import com.outworkers.phantom.builder.query.CreateQuery.DelegatedCreateQuery import com.outworkers.phantom.builder.query.execution._ import com.outworkers.phantom.database.Database diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala index 641faa3c9..3f8bbe792 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/SelectQueryOps.scala @@ -78,8 
+78,7 @@ class SelectQueryOps[ ec: ExecutionContextExecutor, unwrap: Record <:< Tuple1[T] ): F[Option[T]] = { - val enforceLimit = if (query.count) LimitedPart.empty else query.limitedPart append QueryBuilder.limit(1.toString) - singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())).map(_.map(_._1)) + singleFetch(adapter.fromGuava(query.executableQuery.statement())).map(_.map { case Tuple1(vd: T) => vd }) } /** @@ -95,8 +94,7 @@ class SelectQueryOps[ ev: Limit =:= Unlimited, ec: ExecutionContextExecutor ): F[Option[Record]] = { - val enforceLimit = if (query.count) LimitedPart.empty else query.limitedPart append QueryBuilder.limit(1.toString) - singleFetch(adapter.fromGuava(query.copy(limitedPart = enforceLimit).executableQuery.statement())) + singleFetch(adapter.fromGuava(query.executableQuery.statement())) } override def fromRow(r: Row): Record = query.fromRow(r) From f076b76085b908f64118f89129c6b9f6819a0c4d Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 14:38:19 +0100 Subject: [PATCH 26/30] Removing unused file --- .../phantom/ops/AggregateSequence.scala | 46 ------------------- 1 file changed, 46 deletions(-) delete mode 100644 phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala deleted file mode 100644 index 9a9884bd1..000000000 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/ops/AggregateSequence.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2013 - 2017 Outworkers Ltd. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.outworkers.phantom.ops - -import shapeless._ -import shapeless.ops.tuple.Prepend - -/* -trait AggregateSequence[L <: HList] extends DepFn1[L] - -object AggregateSequence { - type Aux[L <: HList, Out0] = AggregateSequence[L] { type Out = Out0 } - - implicit def hnilAggregateSequence: Aux[HNil, Option[Unit]] = - new AggregateSequence[HNil] { - type Out = Option[Unit] - def apply(l: HNil): Option[Unit] = Some(()) - } - - implicit def hconsAggregateSequence[H, T <: HList, OutT]( - implicit fst: Aux[T, Option[OutT]], - pre: Prepend[Tuple1[H], OutT] - ): Aux[Option[H] :: T, Option[pre.Out]] = new AggregateSequence[Option[H] :: T] { - type Out = Option[pre.Out] - - def apply(l: Option[H] :: T): Option[pre.Out] = { - l.head.flatMap(fst(l.tail)).map { - case (h, t) => pre(Tuple1(h), t) - } - } - } - -}*/ \ No newline at end of file From 328734d8e6b8c463045c6be6875a7037649767f2 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 14:44:01 +0100 Subject: [PATCH 27/30] Cleanup --- .../phantom/builder/query/db/crud/SelectTest.scala | 10 +++++----- .../serializers/InsertQuerySerializationTest.scala | 1 - 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/SelectTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/SelectTest.scala index 2c52f34b7..a84a65360 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/SelectTest.scala +++ 
b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/SelectTest.scala @@ -33,7 +33,7 @@ class SelectTest extends PhantomSuite { val row = gen[PrimitiveRecord] val chain = for { - store <- database.primitives.store(row).future() + _ <- database.primitives.store(row).future() b <- database.primitives.select.where(_.pkey eqs row.pkey).one } yield b @@ -47,8 +47,8 @@ class SelectTest extends PhantomSuite { val row = gen[UserSchema] val chain = for { - store <- database.userSchema.truncate().future() - store <- database.userSchema.store(row).future() + _ <- database.userSchema.truncate().future() + _ <- database.userSchema.store(row).future() res <- database.userSchema.select.one() } yield res @@ -61,8 +61,8 @@ class SelectTest extends PhantomSuite { val row = gen[UserSchema] val chain = for { - store <- database.userSchema.truncate().future() - store <- database.userSchema.store(row).future() + _ <- database.userSchema.truncate().future() + _ <- database.userSchema.store(row).future() res <- database.userSchema.checkUserId } yield res diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/InsertQuerySerializationTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/InsertQuerySerializationTest.scala index f299b46c3..b68a7a88d 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/InsertQuerySerializationTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/InsertQuerySerializationTest.scala @@ -111,7 +111,6 @@ class InsertQuerySerializationTest extends QueryBuilderTest { val sample = gen[Recipe] val json = compactJson(renderJValue(Extraction.decompose(sample))) val query = TestDatabase.recipes.insert.json(json).queryString - } "should append USING clause after lightweight part " in { From 8a7bc6a2dc371728d76ea35719735e828905bfe5 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 15:04:02 +0100 Subject: [PATCH 
28/30] Ignoring flaky test --- .../com/outworkers/phantom/builder/batch/BatchQueryTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/batch/BatchQueryTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/batch/BatchQueryTest.scala index c3d74d0af..17118bf10 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/batch/BatchQueryTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/batch/BatchQueryTest.scala @@ -221,7 +221,7 @@ class BatchQueryTest extends PhantomSuite { } } - it should "prioritise batch updates in a last first order" in { + ignore should "prioritise batch updates in a last first order" in { val row = gen[JodaRow] val statement1 = database.primitivesJoda.insert From 5152adcfc05a44818adefb88aba762cccb5924f2 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 15:10:04 +0100 Subject: [PATCH 29/30] Re-adding all output. --- build.sbt | 1 - 1 file changed, 1 deletion(-) diff --git a/build.sbt b/build.sbt index 98b05fdbc..c1edaca76 100644 --- a/build.sbt +++ b/build.sbt @@ -169,7 +169,6 @@ val sharedSettings: Seq[Def.Setting[_]] = Defaults.coreDefaultSettings ++ Seq( Resolver.jcenterRepo ), - logLevel in ThisBuild := { if (Publishing.runningUnderCi) Level.Error else Level.Info }, libraryDependencies ++= Seq( "ch.qos.logback" % "logback-classic" % Versions.logback % Test, "org.slf4j" % "log4j-over-slf4j" % Versions.slf4j From 1645969f8e6a13a62417b2811196a63f865e8504 Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Thu, 19 Jul 2018 15:24:58 +0100 Subject: [PATCH 30/30] Adding log levels per scope. 
--- build.sbt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/build.sbt b/build.sbt index c1edaca76..8a5f8e37a 100644 --- a/build.sbt +++ b/build.sbt @@ -169,6 +169,8 @@ val sharedSettings: Seq[Def.Setting[_]] = Defaults.coreDefaultSettings ++ Seq( Resolver.jcenterRepo ), + logLevel in Compile := { if (Publishing.runningUnderCi) Level.Error else Level.Info }, + logLevel in Test := Level.Info, libraryDependencies ++= Seq( "ch.qos.logback" % "logback-classic" % Versions.logback % Test, "org.slf4j" % "log4j-over-slf4j" % Versions.slf4j