Skip to content

Commit

Permalink
Merge adda4d2 into 2a467ed
Browse files Browse the repository at this point in the history
  • Loading branch information
alexflav23 committed Dec 30, 2016
2 parents 2a467ed + adda4d2 commit 69414f7
Show file tree
Hide file tree
Showing 12 changed files with 261 additions and 76 deletions.
7 changes: 5 additions & 2 deletions build.sbt
Expand Up @@ -35,6 +35,8 @@ lazy val Versions = new {
val cassandraUnit = "3.0.0.1"
val javaxServlet = "3.0.1"
val typesafeConfig = "1.3.1"
val joda = "2.9.4"
val jodaConvert = "1.8.1"

val twitterUtilVersion: String => String = {
s => CrossVersion.partialVersion(s) match {
Expand Down Expand Up @@ -183,11 +185,12 @@ lazy val phantomDsl = (project in file("phantom-dsl")).configs(
compilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full),
"com.outworkers" %% "diesel-engine" % Versions.diesel,
"com.chuusai" %% "shapeless" % Versions.shapeless,
"joda-time" % "joda-time" % "2.9.4",
"org.joda" % "joda-convert" % "1.8.1",
"joda-time" % "joda-time" % Versions.joda,
"org.joda" % "joda-convert" % Versions.jodaConvert,
"com.datastax.cassandra" % "cassandra-driver-core" % Versions.datastax,
"com.datastax.cassandra" % "cassandra-driver-extras" % Versions.datastax,
"org.json4s" %% "json4s-native" % Versions.json4s,
"org.scalamock" %% "scalamock-scalatest-support" % "3.4.2" % Test,
"org.scalacheck" %% "scalacheck" % Versions.scalacheck % Test,
"com.outworkers" %% "util-testing" % Versions.util % Test,
"com.storm-enroute" %% "scalameter" % Versions.scalameter % Test,
Expand Down
13 changes: 12 additions & 1 deletion build/publish_develop.sh
Expand Up @@ -55,7 +55,18 @@ then
echo "Bintray credentials still not found"
fi

# Decide whether to bump the patch version before tagging.
# If the last commit message contains the skip marker, only tag.
COMMIT_MSG=$(git log -1 --pretty=%B 2>&1)
# NOTE: no spaces around '=' — 'VAR = value' would be parsed by bash as
# running the command 'VAR' with arguments, not as an assignment.
COMMIT_SKIP_MESSAGE="[version skip]"

echo "Last commit message $COMMIT_MSG"

if [[ $COMMIT_MSG == *"${COMMIT_SKIP_MESSAGE}"* ]]
then
    echo "Skipping version bump and simply tagging"
    sbt git-tag
else
    sbt version-bump-patch git-tag
fi

echo "Pushing tag to GitHub."
git push --tags "https://${github_token}@${GH_REF}"
Expand Down
Expand Up @@ -93,6 +93,8 @@ abstract class CassandraTable[T <: CassandraTable[T, R], R](

/**
  * Creates a default [[InsertQuery]] builder bound to this table instance.
  * @param keySpace The keyspace the generated query will target.
  * @return An insert query builder for this table.
  */
final def insert()(implicit keySpace: KeySpace): InsertQuery.Default[T, R] = InsertQuery(instance)

/**
  * Delegates to the macro-derived table helper to build an insert query for
  * this table. NOTE(review): the concrete behaviour comes from the generated
  * `TableHelper.store` — presumably equivalent to `insert()`; confirm against
  * the macro implementation.
  * @param keySpace The keyspace the generated query will target.
  */
final def store()(implicit keySpace: KeySpace): InsertQuery.Default[T, R] = helper.store(instance)

/**
  * Creates a default [[DeleteQuery]] builder bound to this table instance.
  * @param keySpace The keyspace the generated query will target.
  * @return A delete query builder for this table.
  */
final def delete()(implicit keySpace: KeySpace): DeleteQuery.Default[T, R] = DeleteQuery[T, R](instance)

final def delete(
Expand Down
Expand Up @@ -111,6 +111,46 @@ object Primitive {
*/
implicit def materializer[T]: Primitive[T] = macro PrimitiveMacro.materializer[T]

/**
  * Builds a [[Primitive]] for a custom type by delegating to an existing
  * primitive for a source type, converting values in each direction with the
  * supplied functions.
  *
  * @param to Converts a [[Target]] value into its [[Source]] representation.
  * @param from Converts a [[Source]] value back into a [[Target]] value.
  * @tparam Target The custom type a primitive is being derived for.
  * @tparam Source A type that already has a [[Primitive]] in implicit scope.
  * @return A [[Primitive]] instance capable of handling the target type.
  */
def derive[Target, Source : Primitive](to: Target => Source)(from: Source => Target): Primitive[Target] = {

  val underlying = implicitly[Primitive[Source]]

  new Primitive[Target] {
    // Re-use the wire-level representation of the source primitive.
    override type PrimitiveType = underlying.PrimitiveType

    /**
      * Converts the type to a CQL compatible string by first converting the
      * value to the source type and then delegating escaping and formatting
      * to the source primitive.
      *
      * @param value The strongly typed value.
      * @return The string representation of the value with respect to CQL standards.
      */
    override def asCql(value: Target): String = underlying.asCql(to(value))

    override def cassandraType: String = underlying.cassandraType

    override def fromRow(column: String, row: GettableByNameData): Try[Target] =
      underlying.fromRow(column, row).map(from)

    override def fromRow(index: Int, row: GettableByIndexData): Try[Target] =
      underlying.fromRow(index, row).map(from)

    override def fromString(value: String): Target = from(underlying.fromString(value))

    override def clz: Class[underlying.PrimitiveType] = underlying.clz
  }
}

/**
* Convenience method to materialise the context bound and return a reference to it.
* This is somewhat shorter syntax than using implicitly.
Expand Down
Expand Up @@ -50,9 +50,9 @@ abstract class AbstractSetColumn[Owner <: CassandraTable[Owner, Record], Record,
class SetColumn[Owner <: CassandraTable[Owner, Record], Record, RR : Primitive](table: CassandraTable[Owner, Record])
extends AbstractSetColumn[Owner, Record, RR](table) with PrimitiveCollectionValue[RR] {

override val valuePrimitive = Primitive[RR]
override val valuePrimitive: Primitive[RR] = Primitive[RR]

val cassandraType = QueryBuilder.Collections.setType(valuePrimitive.cassandraType).queryString
val cassandraType: String = QueryBuilder.Collections.setType(valuePrimitive.cassandraType).queryString

override def qb: CQLQuery = {
if (shouldFreeze) {
Expand Down
Expand Up @@ -20,7 +20,7 @@ import scala.reflect.macros.blackbox
class MacroUtils(val c: blackbox.Context) {
import c.universe._

def fields(tpe: Type): Iterable[(Name, Type)] = {
def caseFields(tpe: Type): Iterable[(Name, Type)] = {
object CaseField {
def unapply(arg: TermSymbol): Option[(Name, Type)] = {
if (arg.isVal && arg.isCaseAccessor) {
Expand Down
Expand Up @@ -16,8 +16,10 @@
package com.outworkers.phantom.macros

import com.datastax.driver.core.Row
import com.outworkers.phantom.builder.query.InsertQuery
import com.outworkers.phantom.{CassandraTable, SelectTable}
import com.outworkers.phantom.column.AbstractColumn
import com.outworkers.phantom.dsl.KeySpace
import com.outworkers.phantom.keys.{ClusteringOrder, PartitionKey, PrimaryKey}

import scala.reflect.macros.blackbox
Expand All @@ -31,31 +33,43 @@ trait TableHelper[T <: CassandraTable[T, R], R] {
// Renders the table's key clause as a string. NOTE(review): presumably the
// CQL primary-key definition derived by the macro — confirm against
// TableHelperMacro's generated implementation.
def tableKey(table: T): String

// Returns the set of column definitions belonging to the given table.
def fields(table: CassandraTable[T, R]): Set[AbstractColumn[_]]

// Builds a default insert query for the given table in the implicit keyspace.
def store(table: T)(implicit space: KeySpace): InsertQuery.Default[T, R]
}

/**
  * Companion providing the implicit materialisation of a [[TableHelper]].
  * The instance is generated at compile time by [[TableHelperMacro]].
  */
object TableHelper {
// Macro entry point: derives tableName/tableKey/fields/store/fromRow for T.
implicit def fieldsMacro[T <: CassandraTable[T, R], R]: TableHelper[T, R] = macro TableHelperMacro.macroImpl[T, R]
}


@macrocompat.bundle
class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {


import c.universe._

val rowType = tq"com.datastax.driver.core.Row"
val builder = q"com.outworkers.phantom.builder.QueryBuilder"
val strTpe = tq"java.lang.String"
val colType = tq"com.outworkers.phantom.column.AbstractColumn[_]"
val collections = q"scala.collection.immutable"
val rowTerm = TermName("row")
val tableTerm = TermName("table")
// Pairs a member's accessor name with the Scala type it carries, as harvested
// from a table or record definition during macro expansion.
case class ColumnMember(
name: TermName,
tpe: Type
) {
// The symbol declaring this member's type.
def symbol: Symbol = tpe.typeSymbol
}

private[this] val rowType = tq"com.datastax.driver.core.Row"
private[this] val builder = q"com.outworkers.phantom.builder.QueryBuilder"
private[this] val strTpe = tq"java.lang.String"
private[this] val colType = tq"com.outworkers.phantom.column.AbstractColumn[_]"
private[this] val collections = q"scala.collection.immutable"
private[this] val rowTerm = TermName("row")
private[this] val tableTerm = TermName("table")
private[this] val keyspaceType = tq"com.outworkers.phantom.connectors.KeySpace"

val knownList = List("Any", "Object", "RootConnector")

val tableSym = typeOf[CassandraTable[_, _]].typeSymbol
val selectTable = typeOf[SelectTable[_, _]].typeSymbol
val rootConn = typeOf[SelectTable[_, _]].typeSymbol
val colSymbol = typeOf[AbstractColumn[_]].typeSymbol
val tableSym: Symbol = typeOf[CassandraTable[_, _]].typeSymbol
val selectTable: Symbol = typeOf[SelectTable[_, _]].typeSymbol
val rootConn: Symbol = typeOf[SelectTable[_, _]].typeSymbol
val colSymbol: Symbol = typeOf[AbstractColumn[_]].typeSymbol

val exclusions: Symbol => Option[Symbol] = s => {
val sig = s.typeSignature.typeSymbol
Expand All @@ -71,6 +85,10 @@ class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {
columns.filter(_.baseClasses.exists(typeOf[Filter].typeSymbol == ))
}

// Builds the type tree for `InsertQuery.Default[table, record]`, used as the
// return type of the generated `store` method.
def insertQueryType(table: Type, record: Type): Tree = {
tq"com.outworkers.phantom.builder.query.InsertQuery.Default[$table, $record]"
}

/**
* This method will check for common Cassandra anti-patterns during the intialisation of a schema.
* If the Schema definition violates valid CQL standard, this function will throw an error.
Expand Down Expand Up @@ -118,6 +136,85 @@ class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {

}

/**
  * Result of attempting to align a record type's fields with a table's
  * columns. Sealed so pattern matches over results are checked for
  * exhaustiveness by the compiler; all variants are declared below.
  */
sealed trait TableMatchResult

/**
  * A successful alignment between record fields and columns.
  * @param results The column members that participated in the match.
  * @param partition True when the record covers only a strict subset of the
  *                  matched columns (sizes differ) — see findMatchingSubset.
  */
case class Match(
  results: List[ColumnMember],
  partition: Boolean
) extends TableMatchResult

/** No alignment between the record fields and the table columns was found. */
case object NoMatch extends TableMatchResult

// Renders a list of types as a comma-separated string of their source forms.
def show(list: List[Type]): String = {
  list.map(tpe => showCode(tq"$tpe")).mkString(", ")
}

// Renders any iterable of types as a comma-separated string of their source forms.
def show(list: Iterable[Type]): String = {
  list.map(tpe => showCode(tq"$tpe")).mkString(", ")
}

/**
  * Finds a subset of table columns whose types line up pairwise with the
  * record's fields, so the extracted record type does not need to cover every
  * column in the table. Leading columns are dropped one at a time until the
  * head of the remaining column list aligns with the record fields.
  *
  * This addresses [[https://websudos.atlassian.net/browse/PHANTOM-237]].
  *
  * @param tableName The name of the table, used only for logging.
  * @param members The column members still under consideration.
  * @param recordMembers The type members of the record type.
  * @return A [[Match]] when an aligned column run is found, [[NoMatch]] otherwise.
  */
def findMatchingSubset(
  tableName: Name,
  members: List[ColumnMember],
  recordMembers: Iterable[ColumnMember]
): TableMatchResult = {
  if (members.isEmpty) {
    NoMatch
  } else {
    // Lazily evaluated so the pairwise comparison is skipped when the size
    // check alone already rules out a match.
    def typesAlign: Boolean = members.zip(recordMembers).forall {
      case (column, record) => column.tpe =:= record.tpe
    }

    if (members.size >= recordMembers.size && typesAlign) {
      Console.println(s"Successfully derived extractor for $tableName using columns: ${members.map(_.name.decodedName.toString).mkString(", ")}")
      // The second argument flags a partial cover: record fields map onto
      // fewer columns than remain in the candidate list.
      Match(members, recordMembers.size != members.size)
    } else {
      // Drop the first column and retry against the remaining suffix.
      findMatchingSubset(tableName, members.tail, recordMembers)
    }
  }
}

/**
  * Resolves, for each given column symbol of the table, the Scala type the
  * column produces when applied to a row.
  *
  * We filter for the members of the table type that directly subclass
  * [[AbstractColumn]]. For every such member we look at the type argument the
  * concrete column definition passed to [[AbstractColumn]], as this is the
  * Scala output type of the column. If the resulting list of types matches the
  * case class field types, a `fromRow` implementation can be auto-generated.
  *
  * @param table The table type the columns belong to.
  * @param columns The column symbols to inspect.
  * @return One [[ColumnMember]] per input symbol; aborts macro expansion if a
  *         column's root type or type parameter cannot be resolved.
  */
def extractColumnMembers(table: Type, columns: List[Symbol]): List[ColumnMember] = {
columns.map { member =>
val memberType = member.typeSignatureIn(table)

memberType.baseClasses.find(colSymbol ==) match {
case Some(root) =>
// Here we expect to have a single type argument or type param
// because we know root here will point to an AbstractColumn[_] symbol.
root.typeSignature.typeParams match {
// We use the special API to see what type was passed through to AbstractColumn[_]
// with special thanks to https://github.com/joroKr21 for helping me not rip
// the remainder of my hair off while uncovering this marvelous macro API method.
case head :: Nil => ColumnMember(
member.asModule.name.toTermName,
head.asType.toType.asSeenFrom(memberType, colSymbol)
)
case _ => c.abort(c.enclosingPosition, "Expected exactly one type parameter provided for root column type")
}
case None => c.abort(c.enclosingPosition, s"Could not find root column type for ${member.asModule.name}")
}
}
}

/**
* Materializes an extractor method for a table, the so called "fromRow" method.
*
Expand Down Expand Up @@ -153,67 +250,28 @@ class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {
* definition could not be inferred.
*/
def materializeExtractor[T](tableTpe: Type, recordTpe: Type, columns: List[Symbol]): Option[Tree] = {
val columnNames = columns.map(
tpe => {
q"$tableTerm.${tpe.typeSignatureIn(tableTpe).typeSymbol.name.toTermName}.apply($rowTerm)"
}
)

/**
* First we create a set of ordered types corresponding to the type signatures
* found in the case class arguments.
*/
val recordMembers = fields(recordTpe) map (_._2)
val recordMembers = caseFields(recordTpe) map { case (nm, tp) => ColumnMember(nm.toTermName, tp) }

/**
* Then we do the same for the columns, we filter for the members of the table type that
* directly subclass [[AbstractColumn[_]]. For every one of those methods, we
* are going to look at what type argumnt was passed by the specific column definition
* when extending [[AbstractColumn[_]] as this will tell us the Scala output type
* of the given column.
* We create a list of these types and if they match the case class types expected,
* it means we can auto-generate a fromRow implementation.
*/
val colMembers = columns.map { member =>
val memberType = member.typeSignatureIn(tableTpe)

memberType.baseClasses.find(colSymbol ==) match {
case Some(root) =>
// Here we expect to have a single type argument or type param
// because we know root here will point to an AbstractColumn[_] symbol.
root.typeSignature.typeParams match {
// We use the special API to see what type was passed through to AbstractColumn[_]
// with special thanks to https://github.com/joroKr21 for helping me not rip
// the remainder of my hair off while uncovering this marvelous macro API method.
case head :: Nil => head.asType.toType.asSeenFrom(memberType, colSymbol)
case _ => c.abort(c.enclosingPosition, "Expected exactly one type parameter provided for root column type")
}
case None => c.abort(c.enclosingPosition, s"Could not find root column type for ${member.asModule.name}")
}
}
val colMembers = extractColumnMembers(tableTpe, columns)

val tableSymbolName = tableTpe.typeSymbol.name
Console.println(recordMembers.map(_.typeSymbol.name.toTypeName.decodedName.toString).mkString(", "))
Console.println(colMembers.map(_.typeSymbol.name.toTermName.decodedName.toString).mkString(", "))

if (recordMembers.size == colMembers.size) {
if (recordMembers.zip(colMembers).forall { case (rec, col) => {
val res = rec =:= col
Console.println(s"$rec was equal to $col status: $res" )
res
} }) {

findMatchingSubset(tableSymbolName, colMembers, recordMembers) match {
case Match(results, _) if recordTpe.typeSymbol.isClass && recordTpe.typeSymbol.asClass.isCaseClass => {
val columnNames = results.map { member =>
q"$tableTerm.${member.name}.apply($rowTerm)"
}

Some(q"""new $recordTpe(..$columnNames)""")
} else {
Console.println(s"The case class records did not match the column member types for $tableSymbolName")
Console.println(recordMembers.map(_.typeSymbol.name.toTypeName.decodedName.toString).mkString(", "))
Console.println(colMembers.map(_.typeSymbol.name.toTermName.decodedName.toString).mkString(", "))
}
case _ => {
Console.println(s"Couldn't automatically infer an extractor for $tableSymbolName")
None
}
} else {
Console.println(s"There were ${recordMembers.size} case class fields and ${colMembers.size} columns for ${tableSymbolName}")
Console.println(recordMembers.map(_.typeSymbol.name.toTypeName.decodedName.toString).mkString(", "))
Console.println(colMembers.map(_.typeSymbol.name.toTermName.decodedName.toString).mkString(", "))
None
}
}

Expand Down Expand Up @@ -252,10 +310,14 @@ class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {

val accessors = columns.map(_.asTerm.name).map(tm => q"table.instance.${tm.toTermName}")

q"""
val tree = q"""
new com.outworkers.phantom.macros.TableHelper[$tableType, $rTpe] {
def tableName: $strTpe = $tableName

def store($tableTerm: $tableType)(implicit space: $keyspaceType): ${insertQueryType(tableType, rTpe)} = {
$tableTerm.insert()
}

def tableKey($tableTerm: $tableType): $strTpe = ${inferPrimaryKey(tableName, tableType, referenceColumns.map(_.typeSignature))}

def fromRow($tableTerm: $tableType, $rowTerm: $rowType): $rTpe = $fromRowDefinition
Expand All @@ -265,6 +327,7 @@ class TableHelperMacro(override val c: blackbox.Context) extends MacroUtils(c) {
}
}
"""
tree
}

}

0 comments on commit 69414f7

Please sign in to comment.