From 0ad1afd4f59c0955c3f03d11fefc1221e280adce Mon Sep 17 00:00:00 2001 From: Flavian Alexandru Date: Sat, 17 Jun 2017 01:30:45 +0100 Subject: [PATCH] Feature/sasi docs (#701) * Integrating tut typechecking of documentation and increasing Cassandra version * Adding docs and more tests for SASI indexes. * Removing more unused code. * Removing numeric operations. * Adding table markdown syntax. * Updating links. * Fixing one more doc link * More docs --- build.sbt | 11 +- build/install_cassandra.sh | 2 +- docs/basics/batches.md | 8 +- docs/basics/database.md | 31 +++- docs/basics/tables.md | 22 ++- docs/migrate.md | 26 +++- docs/querying/indexes/sasi.md | 145 ++++++++++++++++++ docs/querying/select.md | 19 ++- docs/roadmap.md | 24 +-- .../builder/ops/ImplicitMechanism.scala | 8 +- .../phantom/builder/ops/QueryColumn.scala | 4 +- .../phantom/builder/query/sasi/ops.scala | 14 -- .../phantom/column/AbstractColumn.scala | 1 - .../outworkers/phantom/keys/SASIIndex.scala | 2 - .../query/sasi/SASIIntegrationTest.scala | 69 +++++++++ .../builder/query/sasi/SASIQueriesTest.scala | 7 +- project/plugins.sbt | 2 + 17 files changed, 312 insertions(+), 83 deletions(-) create mode 100644 docs/querying/indexes/sasi.md diff --git a/build.sbt b/build.sbt index e1fd5b080..d147598d7 100644 --- a/build.sbt +++ b/build.sbt @@ -21,7 +21,7 @@ import sbt.Defaults._ lazy val Versions = new { val logback = "1.2.3" val sbt = "0.13.13" - val util = "0.34.0" + val util = "0.36.0" val json4s = "3.5.1" val datastax = "3.2.0" val scalatest = "3.0.1" @@ -145,10 +145,15 @@ lazy val phantom = (project in file(".")) ).settings( name := "phantom", moduleName := "phantom", - pgpPassphrase := Publishing.pgpPass + pgpPassphrase := Publishing.pgpPass, + tutSourceDirectory := { + val directory = baseDirectory.value / "docs" + println(directory.getAbsolutePath.toString) + directory + } ).aggregate( fullProjectList: _* - ).enablePlugins(CrossPerProjectPlugin) + 
).enablePlugins(CrossPerProjectPlugin).enablePlugins(TutPlugin) lazy val phantomDsl = (project in file("phantom-dsl")) .settings(sharedSettings: _*) diff --git a/build/install_cassandra.sh b/build/install_cassandra.sh index 3dd5aa3c6..ead837b58 100755 --- a/build/install_cassandra.sh +++ b/build/install_cassandra.sh @@ -30,7 +30,7 @@ jdk_version_8_or_more=$(check_java_version) if [ "$jdk_version_8_or_more" = true ]; then - cassandra_version="3.2" + cassandra_version="3.8" else cassandra_version="2.2.9" fi diff --git a/docs/basics/batches.md b/docs/basics/batches.md index f19508fe9..3bcab069f 100644 --- a/docs/basics/batches.md +++ b/docs/basics/batches.md @@ -20,7 +20,7 @@ phantom also supports `COUNTER` batch updates and `UNLOGGED` batch updates. LOGGED batch statements =========================================================== -```scala +```tut import com.outworkers.phantom.dsl._ @@ -35,7 +35,7 @@ Batch.logged ============================================================ back to top -```scala +```tut import com.outworkers.phantom.dsl._ @@ -48,7 +48,7 @@ Batch.counter Counter operations also offer a standard overloaded operator syntax, so instead of `increment` and `decrement` you can also use `+=` and `-=` to achieve the same thing. -```scala +```tut import com.outworkers.phantom.dsl._ @@ -61,7 +61,7 @@ Batch.counter UNLOGGED batch statements ============================================================ -```scala +```tut import com.outworkers.phantom.dsl._ diff --git a/docs/basics/database.md b/docs/basics/database.md index 377eb8521..c61830a91 100644 --- a/docs/basics/database.md +++ b/docs/basics/database.md @@ -25,7 +25,8 @@ However, from an app or service consumer perspective, when pulling in dependenci That's why phantom comes with very concise levels of segregation between the various consumer levels. When we create a table, we mix in `RootConnector`. 
-```scala +```tut + case class Recipe( url: String, description: Option[String], @@ -55,7 +56,7 @@ class Recipes extends CassandraTable[Recipes, Recipe] with RootConnector { ``` The whole purpose of `RootConnector` is quite simple, it's saying an implementor will basically specify the `session` and `keySpace` of choice. It looks like this, and it's available in phantom by default via the default import, `import com.outworkers.phantom.dsl._`. -```scala +```tut import com.datastax.driver.core.Session @@ -69,7 +70,10 @@ trait RootConnector { Later on when we start creating databases, we pass in a `ContactPoint` or what we call a `connector` in more plain English, which basically fully encapsulates a Cassandra connection with all the possible details and settings required to run an application. -```scala +```tut + +import com.outworkers.phantom.dsl._ + class RecipesDatabase( override val connector: CassandraConnection ) extends Database[RecipesDatabase](connector) { @@ -98,7 +102,9 @@ Sometimes developers can choose to wrap a `database` further, into specific data And this is why we offer another native construct, namely the `DatabaseProvider` trait. This is another really simple but really powerful trait that's generally used cake pattern style. -```scala +```tut + +import com.outworkers.phantom.dsl._ trait DatabaseProvider[T <: Database[T]] { def database: T @@ -107,7 +113,10 @@ trait DatabaseProvider[T <: Database[T]] { This is pretty simple in its design, it simply aims to provide a simple way of injecting a reference to a particular `database` inside a consumer. For the sake of argument, let's say we are designing a `UserService` backed by Cassandra and phantom. Here's how it might look like: -```scala +```tut + +import scala.concurrent.Future +import com.outworkers.phantom.dsl._ class UserDatabase( override val connector: CassandraConnection @@ -158,7 +167,9 @@ Let's go ahead and create two complete examples. 
We are going to make some simpl Let's look at the most basic example of defining a test connector, which will use all default settings plus a call to `noHearbeat` which will disable heartbeats by setting a pooling option to 0 inside the `ClusterBuilder`. We will go through that in more detail in a second, to show how we can specify more complex options using `ContactPoint`. -```scala +```tut + +import com.outworkers.phantom.dsl._ object TestConnector { val connector = ContactPoint.local @@ -177,7 +188,9 @@ It may feel verbose or slightly too much at first, but the objects wrapping the And this is how you would use that provider trait now. We're going to assume ScalaTest is the testing framework in use, but of course that doesn't matter. -```scala +```tut + +import com.outworkers.phantom.dsl._ import org.scalatest.{BeforeAndAfterAll, OptionValues, Matchers, FlatSpec} import org.scalatest.concurrent.ScalaFutures @@ -245,7 +258,9 @@ To override the settings that will be used during schema auto-generation at `Dat When you later call `database.create` or `database.createAsync` or any other flavour of auto-generation on a `Database`, the `autocreate` overriden below will be respected. -```scala +```tut + +import com.outworkers.phantom.dsl._ class UserDatabase( override val connector: CassandraConnection diff --git a/docs/basics/tables.md b/docs/basics/tables.md index ca3041b2a..1455dcf06 100644 --- a/docs/basics/tables.md +++ b/docs/basics/tables.md @@ -24,7 +24,7 @@ case class Recipe( uid: UUID ) -abstract class Recipes extends CassandraTable[Recipes, Recipe] with RootConnector { +abstract class Recipes extends Table[Recipes, Recipe] { object url extends StringColumn with PartitionKey @@ -72,13 +72,11 @@ implemented via `com.outworkers.phantom.NamingStrategy`. These control only the not the columns or anything else. 
-``` -| Strategy | Casing | -| =========================== | ============================= | -| NamingStrategy.CamelCase | lowCamelCase | -| NamingStrategy.SnakeCase | low_snake_case | -| NamingStrategy.Default | Preserves the user input | -``` +| Strategy | Casing | +| ----------------------------- | ----------------------------- | +| `NamingStrategy.CamelCase` | lowCamelCase | +| `NamingStrategy.SnakeCase` | low_snake_case | +| `NamingStrategy.Default` | Preserves the user input | All available imports will have two flavours. It's important to note they only work when imported in the scope where tables are defined. That's where the macro will evaluate @@ -110,7 +108,7 @@ case class ExampleModel ( test: Option[Int] ) -abstract class ExampleRecord extends CassandraTable[ExampleRecord, ExampleModel] with RootConnector { +abstract class ExampleRecord extends Table[ExampleRecord, ExampleModel] { object id extends UUIDColumn with PartitionKey object timestamp extends DateTimeColumn with ClusteringOrder with Ascending object name extends StringColumn @@ -232,7 +230,7 @@ case class Record( email: String ) -abstract class MyTable extends CassandraTable[MyTable, Record] { +abstract class MyTable extends Table[MyTable, Record] { object id extends UUIDColumn with PartitionKey object name extends StringColumn @@ -296,7 +294,7 @@ case class Record( email: String ) -abstract class RecordsByCountry extends CassandraTable[RecordsByCountry, Record] { +abstract class RecordsByCountry extends Table[RecordsByCountry, Record] { object countryCode extends StringColumn with PartitionKey object id extends UUIDColumn with PrimaryKey object name extends StringColumn @@ -343,7 +341,7 @@ case class Record( email: String ) -abstract class RecordsByCountryAndRegion extends CassandraTable[RecordsByCountryAndRegion, Record] { +abstract class RecordsByCountryAndRegion extends Table[RecordsByCountryAndRegion, Record] { object countryCode extends StringColumn with PartitionKey object region extends 
StringColumn with PartitionKey object id extends UUIDColumn with PrimaryKey diff --git a/docs/migrate.md b/docs/migrate.md index f68075c6e..58b243543 100644 --- a/docs/migrate.md +++ b/docs/migrate.md @@ -18,6 +18,7 @@ Feedback and contributions are welcome, and we are happy to prioritise any cruci - [x] Revert all Outworkers projects and all their dependencies to the Apache V2 License. - [x] Publish `outworkers-util` and all sub modules to Maven Central. - [x] Publish `outworkers-diesel` and all sub modules to Maven Central. +- [x] Drop all dependencies outside of `shapeless` and `datastax-java-driver` from `phantom-dsl`. - [x] Remove all non standard resolvers from Phantom, all dependencies should build from JCenter and Maven Central by default with no custom resolvers required. - [x] Change all package names and resolvers to reflect our business name change from `Websudos` to `Outworkers`. - [x] Create a `1.30.x` release that allows users to transition to a no custom resolver version of Phantom 1.0.x even before 2.0.0 is stable. @@ -33,6 +34,16 @@ Feedback and contributions are welcome, and we are happy to prioritise any cruci - [x] Generate the `fromRow` if the fields match, they are in arbitrary order, but there are no duplicate types. - [x] Allow arbitrary inheritance and usage patterns for Cassandra tables, and resolve inheritance resolutions with macros to correctly identify desired table structures. +#### Vast improvements + +- [x] Re-implement primitive types using native macro derived marshallers/unmarshallers. +- [x] Re-implement prepared statement binds to use macro derived serializers. +- [x] Add debug strings to `BatchQuery`. +- [x] Use `AnyVal` in the `ImplicitMechanism` where possible. +- [x] Enforce `store` method typechecking at compile time. +- [x] Use `shapeless.HList` as the core primitive inside table store methods. +- [x] Add advanced debugging to the macro API. 
+ #### Tech debt - [x] Correctly implement Cassandra pagination using iterators, currently setting a `fetchSize` on a query does not correctly propagate or consume the resulting iterator, which leads to API inconsistencies and `PagingState` not being set on any `ResultSet`. @@ -42,11 +53,12 @@ Feedback and contributions are welcome, and we are happy to prioritise any cruci #### Features - [ ] Native support for multi-tenanted environments via cached sessions. -- [ ] Case sensitive CQL. -- [ ] Materialized views. -- [ ] SASI index support +- [x] Case sensitive CQL. +- [ ] Materialized views.(phantom pro) +- [x] SASI index support - [ ] Support for `PER PARTITION LIMIT` in `SelectQuery`. - [ ] Support for `GROUP BY` in `SelectQuery`. +- [x] Implement a compact table DSL that does not require passing in `this` to columns. #### Scala 2.12 support @@ -55,7 +67,7 @@ Feedback and contributions are welcome, and we are happy to prioritise any cruci - [x] Add support for Scala 2.12 in `phantom-dsl` - [x] Add support for Scala 2.12 in `phantom-connectors` - [x] Add support for Scala 2.12 in `phantom-example` -- [ ] Add support for Scala 2.12 in `phantom-streams` +- [x] Add support for Scala 2.12 in `phantom-streams` - [x] Add support for Scala 2.12 in `phantom-thrift` - [x] Add support for Scala 2.12 in `phantom-finagle` @@ -115,19 +127,19 @@ As of phantom 2.5.0, if you have a manually defined method to insert records int For a full set of details on how the `store` method is generated, refer to [the store method](basics/tables#store-methods) docs. This is because phantom successfully auto-generates a basic store method that looks like this below. 
-```scala +```tut import com.outworkers.phantom.dsl._ import scala.concurrent.duration._ case class Record( - id: java.util.UUID, + id: UUID, name: String, firstName: String, email: String ) -abstract class MyTable extends CassandraTable[MyTable, Record] { +abstract class MyTable extends Table[MyTable, Record] { object id extends UUIDColumn with PartitionKey object name extends StringColumn diff --git a/docs/querying/indexes/sasi.md b/docs/querying/indexes/sasi.md new file mode 100644 index 000000000..f95b6e8ec --- /dev/null +++ b/docs/querying/indexes/sasi.md @@ -0,0 +1,145 @@ +### SASI Index support + +Available as of phantom 2.11.0, SASI indexes introduce support for a Cassandra 3.4+ feature, namely SS Table attached +secondary indexes. For more details on the internals of SASI within Cassandra, the details are [here](http://www.doanduyhai.com/blog/?p=2058) +or [here](http://batey.info/cassandra-sasi.html). + +SASI was an attempt to improve performance on the more traditional secondary indexing, which is notoriously unreliable +performance wise after a couple thousand records. + + +### Using SASI support in phantom. + +SASI indexes are natively supported in the standard `phantom-dsl` module, so as long as you have the following in your +`build.sbt` you will not require any special dependencies.
+ +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.outworkers/phantom-dsl_2.11/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.outworkers/phantom-dsl_2.11) + +```scala + +val phantomVersion = "__check_badge_above__" + +libraryDependencies ++= Seq( + "com.outworkers" %% "phantom-dsl" % phantomVersion +) +``` + +Simple example: + +```tut + +import com.outworkers.phantom.dsl._ + +abstract class MultiSASITable extends Table[MultiSASITable, MultiSASIRecord] { + object id extends UUIDColumn with PartitionKey + + object name extends StringColumn with SASIIndex[Mode.Contains] { + override def analyzer: NonTokenizingAnalyzer[Mode.Contains] = { + Analyzer.NonTokenizingAnalyzer[Mode.Contains]().normalizeLowercase(true) + } + } + + object customers extends IntColumn with SASIIndex[Mode.Sparse] { + override def analyzer: Analyzer[Mode.Sparse] = Analyzer[Mode.Sparse]() + } + + object phoneNumber extends StringColumn with SASIIndex[Mode.Prefix] { + override def analyzer: StandardAnalyzer[Mode.Prefix] = { + Analyzer.StandardAnalyzer[Mode.Prefix]().skipStopWords(true).enableStemming(true) + } + } + + object setCol extends SetColumn[Int] + object listCol extends ListColumn[String] +} +``` + + +#### Analyzers + +SASI ships with 3 basic analyzers that are re-created in phantom. + +- `Analyzer.NonTokenizingAnalyzer` +- `Analyzer.StandardAnalyzer` +- `Analyzer.DefaultAnalyzer` + +The `DefaultAnalyzer` will allow you to set all the properties from the root, and the other two are nothing more +than specialised forms. + + +#### Modes + +Phantom SASI support includes all three supported modes for SASI. All analyzers include support for basic comparison + operations, as listed below. Some operators have two flavours, such as `==` and `eqs`, `<` and `lt` and so on. They + are all available via the standard `import com.outworkers.phantom.dsl._` import.
+ + +| Operator | Natural Language equivalent | +| -------- | -------------------------------------- | +| `==` | Equality operator | +| `eqs` | Equality operator | +| `<` | Strictly lower than operator | +| `lt` | Strictly lower than operator | +| `<=` | Lower than or equal to operator | +| `lte` | Lower than or equal to operator | +| `>` | Strictly greater than operator | +| `gt` | Strictly greater than operator | +| `>=` | Greater than or equal to operator | +| `gte` | Greater than or equal to operator | + + +There are two modes directed specifically at text columns, namely `Mode.Prefix` and `Mode.Contains`. By using + these modes, you will be able to perform text specific queries using the `like` operator. + +##### Mode.Prefix + +In addition to the standard operations, the `Prefix` mode will allow you to perform `like(prefix("text"))` style + queries. + +Examples can be found in [SASIIntegrationTest.scala](/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala). + +Example query, based on the schema defined above. + +```scala +db.multiSasiTable.select.where(_.phoneNumber like prefix(pre)).fetch() +``` + +#### Mode.Contains + +This will enable further queries for text columns, such as `like(suffix("value"))` and `like(contains("value"))`, as well +as prefix style queries. + +Examples can be found in [SASIIntegrationTest.scala](/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala). + +Example possible queries, based on the schema defined above.
+ +```scala +val pre = "text" +db.multiSasiTable.select.where(_.name like prefix(pre)).fetch() +db.multiSasiTable.select.where(_.name like contains(pre)).fetch() +db.multiSasiTable.select.where(_.name like suffix(pre)).fetch() +``` + +#### Mode.Sparse + +As suggested in the official SASI tutorial, `Mode.Sparse` is directly targeted at numerical columns and it's a way +to enable standard operators for numerical columns that are not part of the primary key. All standard operators can be used. + +Sparse mode SASI indexes cannot define analyzers, and automated schema creation will fail if you attempt to use an analyzer +in `Mode.Sparse` + +Examples can be found in [SASIIntegrationTest.scala](/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala). + +Example possible queries. + +```scala +val target = 50 +db.multiSasiTable.select.where(_.customers eqs 50).fetch() + +// Select all entries with at least 50 customers +db.multiSasiTable.select.where(_.customers >= 50).fetch() + +// Select all entries with at most 50 customers +db.multiSasiTable.select.where(_.customers <= 50).fetch() +``` + diff --git a/docs/querying/select.md b/docs/querying/select.md index f3f18f5f3..a1ed2ef3d 100644 --- a/docs/querying/select.md +++ b/docs/querying/select.md @@ -18,7 +18,8 @@ If we would just use a timestamp type, if we were to receive two logs for the sa the entries would override each other in Cassandra, because in effect they would have the same partition key and the same clustering key, so the whole primary key would be identical.
-```scala +```tut + import com.outworkers.phantom.dsl._ case class CarMetric( @@ -28,7 +29,7 @@ case class CarMetric( tirePressure: Double ) -abstract class AnalyticsEntries extends CassandraTable[AnalyticsEntries, CarMetric] with RootConnector { +abstract class AnalyticsEntries extends Table[AnalyticsEntries, CarMetric] { object car extends UUIDColumn with PartitionKey object id extends TimeUUIDColumn with ClusteringOrder with Descending object velocity extends DoubleColumn @@ -43,9 +44,13 @@ The following is the list of available query methods on a select, and it can be in various ways. -| Method name | Return type | Purpose | -| =========== | =========== | ======= | - +| Method name | Return type | Purpose | +| --------------------- | ----------------------------------- | ----------------------------------------------------- | +| `future` | `com.outworkers.phantom.ResultSet` | Available on all queries, returns the raw result type. | +| `one` | `Option[R]` | Select a single result as an `Option[R]` | +| `fetch` | `List[R]` | Select a small list of records without a paging state | +| `fetch(modifier)` | `List[R]` | Select a small list of records without a paging state | +| `fetchRecord` | `ListResult[R]` | Fetch a small result together with the `ResultSet` | #### Paginating results by leveraging paging states and automated Cassandra pagination. @@ -72,7 +77,7 @@ The average of a `Float` column will come back as `scala.Float` and so on. | Scala operator | Cassandra operator | Return type | -| ============== | ==================== | ===================== | +| -------------- | -------------------- | --------------------- | | `sum[T : Numeric]` | SUM | `Option[T : Numeric]` | | `min[T : Numeric]` | MIN | `Option[T : Numeric]` | | `max[T : Numeric]` | MAX | `Option[T : Numeric]` |
| `count` | COUNT | `Option[scala.Long]` | To take advantage of these operators, simply use the default import, combined with the `function` argument -and the `aggregate` function. A few examples are found in [SelectFunctionsTesting.scala](https://github.com/outworkers/phantom/blob/develop/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala#L99). +and the `aggregate` function. A few examples are found in [SelectFunctionsTesting.scala](/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/specialized/SelectFunctionsTesting.scala#L99). The structure of an aggregation query is simple, and the return type is diff --git a/docs/roadmap.md b/docs/roadmap.md index 71fe19e3b..e88ab12d5 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -8,23 +8,23 @@ In maintenance mode, users are actively encouraged to upgrade to 2.0.x series. #### Phantom 2.0.x series -- [ ] Support for case sensitive Cassandra tables and keyspace names. +- [x] Support for case sensitive Cassandra tables and keyspace names. -- [ ] Support for tuple columns and collection columns +- [x] Support for tuple columns and collection columns -- [ ] SASI Index support. +- [x] SASI Index support. - [ ] Use `QueryPart` as a building block for schema inference during table auto-generation. - [ ] Deeper integration of CQL features, such as advanced `USING` clauses, `GROUP BY` and `PER PARTITION LIMIT`.(2.0.0) -- [ ] Move documentation back to a branch and to a dedicated versioned website based on Git tags. +- [x] Move documentation back to a branch and to a dedicated versioned website based on Git tags. - [ ] Added implicit.ly integration to notify our audience of new releases. - [ ] Bring test coverage to 100%(2.0.0) -- [ ] Add ability to specify compile time implicit configuration, such as case sensitive column names. +- [x] Add ability to specify compile time implicit configuration, such as case sensitive column names.
- [ ] A new website with highly improved documentation, as well as a per version docs browser. @@ -38,12 +38,16 @@ In maintenance mode, users are actively encouraged to upgrade to 2.0.x series. - [x] Full support for UDTs. - [x] Support for UDT collection types. -- [ ] Scala language based User defined functions. -- [ ] Scala language based User defined aggregates. -- [ ] Cassandra 3.8+ support. -- [ ] Materialised views. -- [ ] Development automated schema migrations. +- [x] Support for nested UDTs. +- [x] Cassandra 3.8+ support. + #### v0.3.0 + - [ ] Auto-tables, ability to generate queries entirely of out `case class` definitions. - [ ] Advanced table migrations. +- [ ] Materialised views. +- [ ] Development mode automated schema migrations. +- [ ] Scala language based User defined functions. +- [ ] Scala language based User defined aggregates. +- [ ] Support for automated schema generation during `database.autocreate` for UDT types. \ No newline at end of file diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/ImplicitMechanism.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/ImplicitMechanism.scala index 1dc46095a..abaeffbbd 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/ImplicitMechanism.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/ImplicitMechanism.scala @@ -18,7 +18,7 @@ package com.outworkers.phantom.builder.ops import com.outworkers.phantom.builder.QueryBuilder import com.outworkers.phantom.builder.clauses.{CompareAndSetClause, OrderingColumn, WhereClause} import com.outworkers.phantom.builder.primitives.Primitive -import com.outworkers.phantom.builder.query.sasi.{Mode, SASINumericOps, SASITextOps} +import com.outworkers.phantom.builder.query.sasi.{Mode, SASITextOps} import com.outworkers.phantom.column._ import com.outworkers.phantom.dsl._ import com.outworkers.phantom.keys.{Indexed, Undroppable} @@ -181,12 +181,6 @@ private[phantom] trait 
ImplicitMechanism extends ModifyMechanism { new QueryColumn[RR](col.name) } - implicit def sasiNumericOps[RR : Primitive : Numeric]( - col: AbstractColumn[RR] with SASIIndex[Mode.Sparse] - ): SASINumericOps[RR] = { - new SASINumericOps[RR](col.name) - } - implicit def sasiTextOps[M <: Mode]( col: AbstractColumn[String] with SASIIndex[M] )(implicit ev: Primitive[String]): SASITextOps[M] = { diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/QueryColumn.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/QueryColumn.scala index c090b3583..284101521 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/QueryColumn.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/ops/QueryColumn.scala @@ -28,9 +28,7 @@ import com.outworkers.phantom.builder.query.prepared.{PrepareMark} * @param name The name of the column. * @tparam RR The type of the value the column holds. */ -sealed class QueryColumn[RR : Primitive](val name: String) { - - private[this] val p = implicitly[Primitive[RR]] +sealed class QueryColumn[RR](val name: String)(implicit p: Primitive[RR]) { def eqs(value: RR): WhereClause.Condition = { new WhereClause.Condition(QueryBuilder.Where.eqs(name, p.asCql(value))) diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/sasi/ops.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/sasi/ops.scala index 684412bb9..3b016036e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/sasi/ops.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/sasi/ops.scala @@ -70,10 +70,6 @@ object AllowedSASIOp { new AllowedSASIOp[Mode.Contains, PrefixOp[T]] {} } - implicit def modeSparseCanPrefix[T]: AllowedSASIOp[Sparse, PrefixOp[T]] = { - new AllowedSASIOp[Mode.Sparse, PrefixOp[T]] {} - } - implicit def modeContainsCanSuffix[T]: AllowedSASIOp[Contains, SuffixOp[T]] = { new AllowedSASIOp[Mode.Contains, SuffixOp[T]] {} } @@ 
-91,13 +87,3 @@ class SASITextOps[M <: Mode]( new WhereClause.Condition(QueryBuilder.SASI.likeAny(col, op.qb.queryString)) } } - -class SASINumericOps[RR : Numeric]( - col: String -) { - - def like[Op <: SASIOp[RR]](op: Op)(implicit ev: AllowedSASIOp[Mode.Sparse, Op]): WhereClause.Condition = { - new WhereClause.Condition(QueryBuilder.SASI.likeAny(col, op.qb.queryString)) - } -} - diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/column/AbstractColumn.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/column/AbstractColumn.scala index f63603e84..110dee26e 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/column/AbstractColumn.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/column/AbstractColumn.scala @@ -50,7 +50,6 @@ trait AbstractColumn[@specialized(Int, Double, Float, Long, Boolean, Short) T] { def isAscending: Boolean = false def isMapKeyIndex: Boolean = false def isMapEntryIndex: Boolean = false - def isSASI: Boolean = false private[this] lazy val _name: String = { cm.reflect(this).symbol.name.toTypeName.decodedName.toString diff --git a/phantom-dsl/src/main/scala/com/outworkers/phantom/keys/SASIIndex.scala b/phantom-dsl/src/main/scala/com/outworkers/phantom/keys/SASIIndex.scala index a0060b497..2c50109f2 100644 --- a/phantom-dsl/src/main/scala/com/outworkers/phantom/keys/SASIIndex.scala +++ b/phantom-dsl/src/main/scala/com/outworkers/phantom/keys/SASIIndex.scala @@ -27,7 +27,5 @@ trait SASIIndex[M <: Mode] { def analyzer: Analyzer[M] def analyzerOptions: CQLQuery = analyzer.qb - - //abstract override def isSASI: Boolean = true } diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala index 9b7e82ed9..73f1af80f 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala +++ 
b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIIntegrationTest.scala @@ -97,4 +97,73 @@ class SASIIntegrationTest extends PhantomSuite { } + it should "allow retrieving gte results using a normal operator in Mode.Sparse" in { + val pre = 55 + val samples = genList[MultiSASIRecord]().map(item => item.copy(customers = pre)) + + if (cassandraVersion.value >= Version.`3.4.0`) { + val chain = for { + stored <- db.multiSasiTable.truncate().future() + stored <- Future.sequence(samples.map(db.multiSasiTable.storeRecord(_))) + query <- db.multiSasiTable.select.where(_.customers >= pre).fetch() + } yield query + + whenReady(chain) { results => + results should contain theSameElementsAs samples + } + } + } + + it should "allow retrieving lte results using a normal operator in Mode.Sparse" in { + val pre = 55 + val samples = genList[MultiSASIRecord]().map(item => item.copy(customers = pre)) + + if (cassandraVersion.value >= Version.`3.4.0`) { + val chain = for { + stored <- db.multiSasiTable.truncate().future() + stored <- Future.sequence(samples.map(db.multiSasiTable.storeRecord(_))) + query <- db.multiSasiTable.select.where(_.customers <= pre).fetch() + } yield query + + whenReady(chain) { results => + results should contain theSameElementsAs samples + } + } + } + + + it should "allow retrieving == results using a normal operator in Mode.Sparse" in { + val pre = 55 + val samples = genList[MultiSASIRecord]().map(item => item.copy(customers = pre)) + + if (cassandraVersion.value >= Version.`3.4.0`) { + val chain = for { + stored <- db.multiSasiTable.truncate().future() + stored <- Future.sequence(samples.map(db.multiSasiTable.storeRecord(_))) + query <- db.multiSasiTable.select.where(_.customers eqs pre).fetch() + } yield query + + whenReady(chain) { results => + results should contain theSameElementsAs samples + } + } + } + + it should "retrieve no results for an invalid clause in Mode.Sparse" in { + val pre = 55 + val samples = 
genList[MultiSASIRecord]().map(item => item.copy(customers = pre)) + + if (cassandraVersion.value >= Version.`3.4.0`) { + val chain = for { + stored <- db.multiSasiTable.truncate().future() + stored <- Future.sequence(samples.map(db.multiSasiTable.storeRecord(_))) + query <- db.multiSasiTable.select.where(_.customers > pre).fetch() + } yield query + + whenReady(chain) { results => + results.size shouldEqual 0 + } + } + } + } diff --git a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIQueriesTest.scala b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIQueriesTest.scala index a467efdaa..0e1a17df1 100644 --- a/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIQueriesTest.scala +++ b/phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/sasi/SASIQueriesTest.scala @@ -66,10 +66,9 @@ class SASIQueriesTest extends PhantomSuite { qb shouldEqual s"SELECT * FROM ${db.space.name}.${database.multiSasiTable.tableName} WHERE phoneNumber LIKE '078%';" } - it should "allow using a prefix clause on a Mode.Sparse Text column" in { - val qb = database.multiSasiTable.select.where(_.customers like prefix(5)).queryString - - qb shouldEqual s"SELECT * FROM ${db.space.name}.${database.multiSasiTable.tableName} WHERE customers LIKE '5%';" + it should "not allow like queries in Mode.Sparse" in { + val pre = 55 + "db.multiSasiTable.select.where(_.customers like(prefix(pre)) pre).fetch()" shouldNot compile } it should "not allow using a suffix clause on a Mode.Sparse Text column" in { diff --git a/project/plugins.sbt b/project/plugins.sbt index 4540dd7fc..d3fa6580e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -59,4 +59,6 @@ addSbtPlugin("com.twitter" % "scrooge-sbt-plugin" % Versions.scrooge) addSbtPlugin("com.eed3si9n" % "sbt-doge" % "0.1.5") +addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.5.2") + libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.22" \ No newline at end of file