Commit: profiles

dnvriend committed Feb 19, 2017
1 parent 1e95e4f commit 11d9d2c
Showing 10 changed files with 124 additions and 12 deletions.
5 changes: 5 additions & 0 deletions README.md
@@ -22,6 +22,11 @@ class AlbumTable(tag: Tag) extends Table[Album](tag, "album") {
lazy val AlbumTable = TableQuery[AlbumTable]
```

## Custom Column Mapping
Custom mappings between Scala types and database columns are defined with the `ColumnType` typeclass.
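A minimal sketch of such a mapping, using `MappedColumnType.base` from the profile's `api` (the `AlbumId` wrapper below is hypothetical and only used for illustration):

```scala
import slick.jdbc.H2Profile.api._

object CustomColumnMapping {
  // Hypothetical wrapper type, used only to illustrate the mapping.
  case class AlbumId(value: Long)

  // ColumnType typeclass instance: tells Slick how to convert between
  // AlbumId and the underlying Long column when reading and writing.
  implicit val albumIdColumnType: BaseColumnType[AlbumId] =
    MappedColumnType.base[AlbumId, Long](_.value, AlbumId(_))

  // With the implicit in scope, columns can be declared as Rep[AlbumId],
  // e.g. column[AlbumId]("id", O.PrimaryKey, O.AutoInc)
}
```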



### Queries
A DSL for building SQL
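As a rough sketch, assuming the `Album` case class and the `AlbumTable` definition from the snippet above (with `artist`, `title` and `year` columns) and the H2 profile's `api` in scope, a query reads like a collection operation:

```scala
import slick.jdbc.H2Profile.api._

// roughly: select * from "album" where "artist" = 'Dire Straits' order by "year"
val direStraitsAlbums: Query[AlbumTable, Album, Seq] =
  AlbumTable
    .filter(_.artist === "Dire Straits")
    .sortBy(_.year.asc)

// Turning the query into an action and running it, db.run(direStraitsAlbums.result),
// yields a Future[Seq[Album]].
```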

1 change: 1 addition & 0 deletions src/main/resources/application.conf
@@ -32,6 +32,7 @@ akka {

# Default database configuration
slick.dbs.default.driver = "slick.driver.H2Driver$"
//slick.dbs.default.driver = "slick.jdbc.PostgresProfile$"
slick.dbs.default.db.driver = "org.h2.Driver"
slick.dbs.default.db.url = "jdbc:h2:mem:play"
slick.dbs.default.db.maximumPoolSize = 10
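The commented-out PostgresProfile line suggests the matching connection settings would look roughly like the sketch below; the JDBC URL and credentials are placeholders, not values taken from this project:

```
# placeholder Postgres settings (adjust host, database name and credentials)
slick.dbs.default.driver = "slick.jdbc.PostgresProfile$"
slick.dbs.default.db.driver = "org.postgresql.Driver"
slick.dbs.default.db.url = "jdbc:postgresql://localhost:5432/play"
slick.dbs.default.db.user = "play"
slick.dbs.default.db.password = "play"
```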
@@ -20,7 +20,7 @@ class AkkaPersistenceRepository @Inject() (protected val dbConfigProvider: Datab
def getProfile: JdbcProfile = profile
def database: JdbcBackend#DatabaseDef = db

class Journal(tag: Tag) extends Table[JournalRow](tag, "JDBC_JOURNAL") {
class JournalTable(tag: Tag) extends Table[JournalRow](tag, "JDBC_JOURNAL") {
def * : ProvenShape[JournalRow] = (ordering, deleted, persistenceId, sequenceNumber, message, tags) <> (JournalRow.tupled, JournalRow.unapply)

val ordering: Rep[Long] = column[Long]("ordering", O.AutoInc)
@@ -32,9 +32,9 @@ class AkkaPersistenceRepository @Inject() (protected val dbConfigProvider: Datab
val pk: PrimaryKey = primaryKey("journal_pk", (persistenceId, sequenceNumber))
}

lazy val JournalTable = new TableQuery(tag => new Journal(tag))
lazy val JournalTable = TableQuery[JournalTable]

class Snapshot(tag: Tag) extends Table[SnapshotRow](tag, "JDBC_SNAPSHOTS") {
class SnapshotTable(tag: Tag) extends Table[SnapshotRow](tag, "JDBC_SNAPSHOTS") {
def * : ProvenShape[SnapshotRow] = (persistenceId, sequenceNumber, created, snapshot) <> (SnapshotRow.tupled, SnapshotRow.unapply)

val persistenceId: Rep[String] = column[String]("persistence_id", O.Length(255, varying = true))
@@ -44,7 +44,7 @@ class AkkaPersistenceRepository @Inject() (protected val dbConfigProvider: Datab
val pk: PrimaryKey = primaryKey("snapshot_pk", (persistenceId, sequenceNumber))
}

lazy val SnapshotTable = new TableQuery(tag => new Snapshot(tag))
lazy val SnapshotTable = TableQuery[SnapshotTable]

def dropCreateSchema: Future[Unit] = {
val schema = JournalTable.schema ++ SnapshotTable.schema
8 changes: 6 additions & 2 deletions src/main/scala/tables/Example01.scala
@@ -4,11 +4,15 @@ import slick.dbio.Effect.{ Read, Schema, Write }

import scala.concurrent._
import scala.concurrent.duration._
import slick.jdbc.H2Profile.api._
import slick.jdbc.PostgresProfile.api._
import slick.lifted.{ ProvenShape, TableQuery }
import slick.sql.{ FixedSqlAction, FixedSqlStreamingAction }

// runMain tables.Example01
/**
* Store Album types in the database based on a case class
*
* runMain tables.Example01
*/
object Example01 extends App {

// Tables -- mappings between scala types and database tables
18 changes: 14 additions & 4 deletions src/main/scala/tables/Example02.scala
@@ -8,7 +8,17 @@ import slick.sql.{ FixedSqlAction, FixedSqlStreamingAction }
import scala.concurrent._
import scala.concurrent.duration._

// runMain tables.Example02
/**
 * Store Album types in the database based on a plain (non-case) class,
 * so here we have to provide our own equivalents of the tupled and
 * unapply methods that a case class companion would give us for free.
 *
 * When reading data, Slick uses the default projection to convert
 * tuples => classes, and when writing data it uses the default
 * projection to convert classes => tuples, so we need to supply
 * these two conversion functions ourselves.
 *
 * runMain tables.Example02
 */
object Example02 extends App {

// Tables -- mappings between scala types and database tables
@@ -17,10 +17,27 @@ object Example02 extends App {
def apply(artist: String, title: String, year: Int, id: Long = 0): Album =
new Album(artist, title, year, id)

def tupled(p: (String, String, Int, Long)): Album = p match {
def createAlbum(p: (String, String, Int, Long)): Album = p match {
case (artist, title, year, id) => Album(artist, title, year, id)
}
def unapply(album: Album): Option[(String, String, Int, Long)] =
def extractAlbum(album: Album): Option[(String, String, Int, Long)] =
Option((album.artist, album.title, album.year, album.id))
}

@@ -44,7 +54,7 @@ object Example02 extends App {
// this is the default projection for the table. It tells us how to convert between a
// tuple of these columns of the database and the Album datatype that we want to map
// using this table.
def * : ProvenShape[Album] = (artist, title, year, id).<>(Album.tupled, Album.unapply)
def * : ProvenShape[Album] = (artist, title, year, id).<>(Album.createAlbum, Album.extractAlbum)
}

lazy val AlbumTable = TableQuery[AlbumTable]
88 changes: 88 additions & 0 deletions src/main/scala/tables/Example03.scala
@@ -0,0 +1,88 @@
package tables

import slick.dbio.Effect.{ Read, Schema, Write }
import slick.jdbc.H2Profile.api._
import slick.lifted.{ ProvenShape, TableQuery }
import slick.sql.{ FixedSqlAction, FixedSqlStreamingAction }

import scala.concurrent._
import scala.concurrent.duration._

/**
* Custom Data Type in an individual column
*
* runMain tables.Example03
*/
object Example03 extends App {

// Tables -- mappings between scala types and database tables

case class Album(artist: String, title: String, year: Int, id: Long = 0)

// A standard Slick table type representing an SQL table type to store instances
// of type Album.
class AlbumTable(tag: Tag) extends Table[Album](tag, "albums") {

// definitions of each of the columns
def artist: Rep[String] = column[String]("artist")
def title: Rep[String] = column[String]("title")
def year: Rep[Int] = column[Int]("year")

// the 'id' column has a couple of extra 'flags' to say
// 'make this a primary key' and 'make this an auto incrementing primary key'
def id: Rep[Long] = column[Long]("id", O.PrimaryKey, O.AutoInc)

// this is the default projection for the table. It tells us how to convert between a
// tuple of these columns of the database and the Album datatype that we want to map
// using this table.
def * : ProvenShape[Album] = (artist, title, year, id) <> (Album.tupled, Album.unapply)
}

lazy val AlbumTable = TableQuery[AlbumTable]

// Actions -- represent commands issued to the database

// create table "albums" ("artist" VARCHAR NOT NULL,"title" VARCHAR NOT NULL,"year" INTEGER NOT NULL,"id" BIGINT NOT NULL PRIMARY KEY AUTO_INCREMENT)
val createTableAction: FixedSqlAction[Unit, NoStream, Schema] = AlbumTable.schema.create

// insert into "albums" ("artist","title","year") values (?,?,?)
val insertAlbumsAction: FixedSqlAction[Option[Int], NoStream, Write] = AlbumTable ++= Seq(
Album("Mark Knopfler", "Altamira", 2015),
Album("Mark Knopfler", "Privateering", 2012),
Album("Mark Knopfler", "Kill To Get Crimson", 2007),
Album("Mark Knopfler", "All The Roadrunning", 2006),
Album("Mark Knopfler", "Shangri-La", 2004),
Album("Mark Knopfler", "The Ragpicker's Dream", 2002),
Album("Mark Knopfler", "Sailing To Philadelphia", 2000),
Album("Mark Knopfler", "Wag The Dog", 1998),
Album("Mark Knopfler", "Golden Heart", 1996),
Album("Dire Straits", "Live At The BBC", 1995),
Album("Dire Straits", "On The Night", 1993),
Album("Dire Straits", "On Every Street", 1991),
Album("Dire Straits", "Brothers In Arms", 1985),
Album("Dire Straits", "Alchemy: Dire Straits Live", 1984),
Album("Dire Straits", "Love Over Gold", 1982),
Album("Dire Straits", "Making Movies", 1980),
Album("Dire Straits", "Communique", 1979),
Album("Dire Straits", "Dire Straits", 1978)
)

// select "artist", "title", "year", "id" from "albums"
val selectAlbumsActions: FixedSqlStreamingAction[Seq[Album], Album, Read] = AlbumTable.result

// Database --

private val db = Database.forConfig("scalaxdb")

// Application --

// db.run takes an Action, runs it against the database and gives us a Future[T]
// and is great when dealing with async code. Here we don't have async code but
// sync code so we will await here.
private def exec[T](action: DBIO[T]): T =
Await.result(db.run(action), 2.seconds)

exec(createTableAction)
exec(insertAlbumsAction)
exec(selectAlbumsActions).foreach(println)
}
2 changes: 2 additions & 0 deletions src/test/resources/application.conf
@@ -32,6 +32,8 @@ akka {

# Default database configuration
slick.dbs.default.driver="slick.driver.H2Driver$"
//slick.dbs.default.driver = "slick.jdbc.PostgresProfile$"

slick.dbs.default.db.driver="org.h2.Driver"
slick.dbs.default.db.url="jdbc:h2:mem:play"
slick.dbs.default.db.maximumPoolSize=10
2 changes: 2 additions & 0 deletions src/test/scala/com/github/dnvriend/TestSpec.scala
@@ -68,6 +68,8 @@ class TestSpec extends FlatSpec
val db = personRepository.database
val profile = personRepository.getProfile

println(s"====> Database Profile = $profile")

// ================================== Supporting Operations ====================================
implicit class PimpedByteArray(self: Array[Byte]) {
def getString: String = new String(self)
@@ -13,7 +13,7 @@ class H2SnapshotQueriesTest extends TestSpec {
def insertSnapshotStatement(snapshotRow: SnapshotRow): String =
SnapshotTable.insertOrUpdate(snapshotRow).statements.head

def selectAll(persistenceId: String): Query[Snapshot, SnapshotRow, Seq] =
def selectAll(persistenceId: String) =
SnapshotTable.filter(_.persistenceId === persistenceId).sortBy(_.sequenceNumber.desc)

"DDL" should "generate create table statement" in {
@@ -13,7 +13,7 @@ class OracleSnapshotQueriesTest extends TestSpec {
def insertSnapshotStatement(snapshotRow: SnapshotRow): String =
SnapshotTable.insertOrUpdate(snapshotRow).statements.head

def selectAll(persistenceId: String): Query[Snapshot, SnapshotRow, Seq] =
def selectAll(persistenceId: String) =
SnapshotTable.filter(_.persistenceId === persistenceId).sortBy(_.sequenceNumber.desc)

"DDL" should "generate create table statement" in {