Improved documentation:

- A chapter on Slick Extensions
- More lifted embedding features: Deleting, Inserting, Updating,
  Query Templates, User-Defined Functions and Types
- Structured list of supported primitive types
- Correct slick artifact spec in gettingstarted.rst
- Better auto-generated names for inner classes in apilinks.py
- Link to ScalaQuery for Scala 2.9 in introduction.rst (fixes issue #68)
szeiger committed Jan 10, 2013
1 parent 57af0c8 commit 829053c246845b2c0f31bd2bc1feb0cab1a9f29c
@@ -2,6 +2,7 @@ package scala.slick.docsnippets

import scala.slick.driver.H2Driver.simple._
import Database.threadLocalSession
import java.sql.Date

class LiftedEmbedding {

@@ -76,15 +77,20 @@ class LiftedEmbedding {
//#reptypes

//#mappedtable
//#insert2
case class User(id: Option[Int], first: String, last: String)

object Users extends Table[User]("users") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def first = column[String]("first")
def last = column[String]("last")
def * = id.? ~ first ~ last <> (User, User.unapply _)
//#mappedtable
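// Projection without the AutoInc id column, used for inserting new users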
def forInsert = first ~ last <> ({ t => User(None, t._1, t._2)}, { (u: User) => Some((u.first, u.last))})
//#mappedtable
}
//#mappedtable
//#insert2

//#index
//#primarykey
@@ -148,8 +154,16 @@ class LiftedEmbedding {
val l = q.list
val v = q.to[Vector]
val invoker = q.invoker
val statement = q.selectStatement
//#invoker
}
{
//#delete
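// Delete the rows selected by the query q and get the number of affected rows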
val affectedRowsCount = q.delete
val invoker = q.deleteInvoker
val statement = q.deleteStatement
//#delete
}

{
val session = threadLocalSession
@@ -171,4 +185,113 @@ class LiftedEmbedding {
}
//#aggregation3
}

db withSession {
//#insert1
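// Insert a single row using the table's default * projection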
Coffees.insert("Colombian", 101, 7.99, 0, 0)

Coffees.insertAll(
("French_Roast", 49, 8.99, 0, 0),
("Espresso", 150, 9.99, 0, 0)
)

// "sales" and "total" will use the default value 0:
(Coffees.name ~ Coffees.supID ~ Coffees.price).insert("Colombian_Decaf", 101, 8.99)

val statement = Coffees.insertStatement
val invoker = Coffees.insertInvoker
//#insert1

//#insert2

Users.forInsert insert User(None, "Christopher", "Vogt")
//#insert2

//#insert3
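// Return the auto-generated primary key instead of the affected-rows count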
val userId =
Users.forInsert returning Users.id insert User(None, "Stefan", "Zeiger")
//#insert3

//#insert4
object Users2 extends Table[(Int, String)]("users2") {
def id = column[Int]("id", O.PrimaryKey)
def name = column[String]("name")
def * = id ~ name
}

Users2.ddl.create

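// Insert the result of a query (executed server-side as an insert from select)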
Users2 insert (Users.map { u => (u.id, u.first ++ " " ++ u.last) })

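// Insert a single row computed from a scalar expression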
Users2 insertExpr (Query(Users).length + 1, "admin")
//#insert4
}

db withSession {
//#update1
val q = for { c <- Coffees if c.name === "Espresso" } yield c.price
q.update(10.49)

val statement = q.updateStatement
val invoker = q.updateInvoker
//#update1
}

db withSession {
//#template1
val userNameByID = for {
id <- Parameters[Int]
u <- Users if u.id is id
} yield u.first

val name = userNameByID(2).first

val userNameByIDRange = for {
(min, max) <- Parameters[(Int, Int)]
u <- Users if u.id >= min && u.id < max
} yield u.first

val names = userNameByIDRange(2, 5).list
//#template1
}

db withSession {
object SalesPerDay extends Table[(Date, Int)]("SALES_PER_DAY") {
def day = column[Date]("DAY", O.PrimaryKey)
def count = column[Int]("COUNT")
def * = day ~ count
}

//#simplefunction1
// H2 has a day_of_week() function which extracts the day of week from a timestamp
val dayOfWeek = SimpleFunction.unary[Date, Int]("day_of_week")

// Use the lifted function in a query to group by day of week
val q1 = for {
(dow, q) <- SalesPerDay.map(s => (dayOfWeek(s.day), s.count)).groupBy(_._1)
} yield (dow, q.map(_._2).sum)
//#simplefunction1

//#simplefunction2
def dayOfWeek2(c: Column[Date]) =
SimpleFunction("day_of_week")(TypeMapper.IntTypeMapper)(Seq(c))
//#simplefunction2
}

db withSession {
//#mappedtype1
// An algebraic data type for booleans
sealed trait Bool
case object True extends Bool
case object False extends Bool

// And a TypeMapper that maps it to Int values 1 and 0
implicit val boolTypeMapper = MappedTypeMapper.base[Bool, Int](
{ b => if(b == True) 1 else 0 }, // map Bool to Int
{ i => if(i == 1) True else False } // map Int to Bool
)

// You can now use Bool like any built-in column type (in tables, queries, etc.)
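// For example (a hypothetical table, not part of the original schema):
object Flags extends Table[(Int, Bool)]("flags") {
def id = column[Int]("id", O.PrimaryKey)
def value = column[Bool]("value")
def * = id ~ value
}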
//#mappedtype1
}
}
@@ -0,0 +1,17 @@
Slick Extensions
================

Slick drivers for Oracle (``com.typesafe.slick.driver.oracle.OracleDriver``)
and DB2 (``com.typesafe.slick.driver.db2.DB2Driver``) are available in
*slick-extensions*, a closed-source package with commercial support
provided by Typesafe, Inc. It is made available under the terms and conditions
of the `Typesafe Subscription Agreement`_ (PDF).

If you are using sbt_, you can add *slick-extensions* and the Typesafe
repository (which contains the required artifacts) to your build definition
like this::

// Use the right Slick version here:
libraryDependencies += "com.typesafe" %% "slick-extensions" % "1.0.0"

resolvers += "Typesafe Releases" at "http://repo.typesafe.com/typesafe/maven-releases/"
@@ -24,6 +24,8 @@ def role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
if not has_explicit_title:
idents = part.split(".")
title = idents[len(idents)-1]
idents = title.split("$")
title = idents[len(idents)-1]
pnode = nodes.reference(title, title, internal=False, refuri=full_url)
return [pnode], []
return role
@@ -15,7 +15,7 @@ main ``build.sbt`` file::

libraryDependencies ++= List(
// use the right Scala and Slick versions here:
"com.typesafe" %% "slick_2.10" % "1.0.0",
"com.typesafe" %% "slick" % "1.0.0",
"org.slf4j" % "slf4j-nop" % "1.6.4",
"com.h2database" % "h2" % "1.3.166"
)
@@ -10,3 +10,4 @@ Slick - Scala Language Integrated Connection Kit
direct-embedding
sql
testkit
extensions
@@ -19,6 +19,9 @@ It includes the following features:
* Simple execution of raw SQL queries
* Session management based on JDBC Connections

Slick requires Scala 2.10. ScalaQuery_, the predecessor of Slick, is available
for Scala 2.9.

.. _supported-dbs:

Supported database systems
@@ -27,12 +30,14 @@ Supported database systems
The following database systems are directly supported for type-safe queries
(with lifted and direct embedding):

* DB2 (via :doc:`slick-extensions <extensions>`)
* Derby/JavaDB
* H2
* HSQLDB/HyperSQL
* Microsoft Access
* Microsoft SQL Server
* MySQL
* Oracle (via :doc:`slick-extensions <extensions>`)
* PostgreSQL
* SQLite

@@ -45,23 +45,13 @@ Scala type and a column name for the database (usually in upper-case). The
following primitive types are supported out of the box (with certain
limitations imposed by the individual database drivers):

- *Numeric types*: Byte, Short, Int, Long, BigDecimal, Float, Double
- *LOB types*: java.sql.Blob, java.sql.Clob, Array[Byte]
- *Date types*: java.sql.Date, java.sql.Time, java.sql.Timestamp
- Boolean
- java.sql.Blob
- Byte
- Array[Byte]
- java.sql.Clob
- java.sql.Date
- Double
- Float
- Int
- Long
- Short
- String
- java.sql.Time
- java.sql.Timestamp
- Unit
- java.util.UUID
- BigDecimal

Nullable columns are represented by ``Option[T]`` where ``T`` is one of the
supported primitive types. Note that all operations on Option values are
@@ -310,11 +300,127 @@ If you only want a single result value, you can use ``first`` or
used to iterate over the result set without first copying all data into a
Scala collection.

Inserting and Updating
----------------------
Deleting
--------

Deleting works very similarly to querying. You write a query which selects the
rows to delete and then call the ``delete`` method on it. There is again an
implicit conversion from ``Query`` to the special
:api:`scala.slick.driver.BasicInvokerComponent$DeleteInvoker` which provides
the ``delete`` method and a self-reference ``deleteInvoker``:

.. includecode:: code/LiftedEmbedding.scala#delete

A query for deleting must only select from a single table. Any projection is
ignored (it always deletes full rows).
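
For example, a minimal sketch (using the ``Coffees`` table from the earlier
examples, not taken verbatim from the snippet above) that first builds a query
for the rows to remove and then deletes them::

  // Select the rows to delete, then call delete on the query
  val deleteEspresso = for { c <- Coffees if c.name === "Espresso" } yield c
  val deletedRows = deleteEspresso.delete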

Inserting
---------

Inserts are done based on a projection of columns from a single table. When
you use the table directly, the insert is performed against its ``*``
projection. Omitting some of a table's columns when inserting causes the
database to use the default values specified in the table definition, or
a type-specific default in case no explicit default was given. All methods
for inserting are defined in
:api:`scala.slick.driver.BasicInvokerComponent$InsertInvoker` and
:api:`scala.slick.driver.BasicInvokerComponent$FullInsertInvoker`.

.. includecode:: code/LiftedEmbedding.scala#insert1

While some database systems allow inserting explicit values into AutoInc
columns or inserting ``None`` to get a generated value, most databases forbid
this behaviour, so you have to make sure to omit these columns. Slick does not
yet have a feature to do this automatically, but it is planned for a future
release. For now, you have to use a projection which does not include the
AutoInc column, like ``forInsert`` in the following example:

.. includecode:: code/LiftedEmbedding.scala#insert2

In these cases you frequently want to get back the auto-generated primary key
column. By default, ``insert`` gives you a count of the number of affected
rows (which will usually be 1) and ``insertAll`` gives you an accumulated
count in an ``Option`` (which can be ``None`` if the database system does not
provide counts for all rows). This can be changed with the ``returning``
method where you specify the columns to be returned (as a single value or
tuple from ``insert`` and a ``Seq`` of such values from ``insertAll``):

.. includecode:: code/LiftedEmbedding.scala#insert3

Note that many database systems only allow a single column to be returned
which must be the table's auto-incrementing primary key. If you ask for
other columns a ``SlickException`` is thrown at runtime (unless the database
actually supports it).

Instead of inserting data from the client side you can also insert data
created by a ``Query`` or a scalar expression that is executed in the
database server:

.. includecode:: code/LiftedEmbedding.scala#insert4

Updating
--------

Updates are performed by writing a query that selects the data to update and
then calling ``update`` on it with the new values. The query must only return
raw columns (no computed values) selected from a single table. The relevant
methods for updating are defined in
:api:`scala.slick.driver.BasicInvokerComponent$UpdateInvoker`.

.. includecode:: code/LiftedEmbedding.scala#update1

There is currently no way to use scalar expressions or transformations of
the existing data in the database for updates.

Query Templates
---------------

Query templates are parameterized queries. A template works like a function
that takes some parameters and returns a ``Query`` for them, except that the
template is more efficient. When you evaluate a function to create a query,
the function constructs a new query AST, and when you execute that query it
has to be compiled anew by the query compiler every time, even if compilation
always results in the same SQL string. A query template, on the other hand, is
by design limited to a single SQL string (in which all parameters are turned
into bind variables), but the query is built and compiled only once.

You can create a query template by calling ``flatMap`` on a
:api:`scala.slick.lifted.Parameters` object. In many cases this enables you
to write a single *for comprehension* for a template.

.. includecode:: code/LiftedEmbedding.scala#template1
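
The first template above is only syntactic sugar for a ``flatMap``/``map``
chain on the ``Parameters`` object. Written out by hand, a roughly equivalent
sketch (not part of the snippet above) could look like this::

  // The for comprehension desugared into explicit calls
  val userNameByID2 = Parameters[Int] flatMap { id =>
    Query(Users) filter { u => u.id is id } map { u => u.first }
  }

  val name2 = userNameByID2(2).first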

User-Defined Functions and Types
--------------------------------

If your database system supports a scalar function that is not available as
a method in Slick you can define it as a
:api:`scala.slick.lifted.SimpleFunction`. There are predefined methods for
creating unary, binary and ternary functions with fixed parameter and return
types.

.. includecode:: code/LiftedEmbedding.scala#simplefunction1

If you need more flexibility regarding the types (e.g. for varargs,
polymorphic functions, or to support Option and non-Option types in a single
function), you can use ``SimpleFunction.apply`` to get an untyped instance and
write your own wrapper function with the proper type-checking:

.. includecode:: code/LiftedEmbedding.scala#simplefunction2
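
The hand-wrapped function can then be used in queries just like the predefined
variant, e.g. (a hypothetical usage sketch)::

  // Project the day of week next to the count, using the untyped wrapper
  val q2 = SalesPerDay.map(s => (dayOfWeek2(s.day), s.count))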

:api:`scala.slick.lifted.SimpleBinaryOperator` and
:api:`scala.slick.lifted.SimpleLiteral` work in a similar way. For even more
flexibility (e.g. function-like expressions with unusual syntax), you can
use :api:`scala.slick.lifted.SimpleExpression`.

If you need a custom column type you can implement
:api:`scala.slick.lifted.TypeMapper` and
:api:`scala.slick.lifted.TypeMapperDelegate`. The most common scenario is
mapping an application-specific type to an already supported type in the
database. This can be done much more simply by using a
:api:`scala.slick.lifted.MappedTypeMapper` which takes care of all the
boilerplate:

.. includecode:: code/LiftedEmbedding.scala#mappedtype1

You can also subclass ``MappedTypeMapper`` for a bit more flexibility.
@@ -3,3 +3,5 @@
.. _Logback: http://logback.qos.ch/
.. _H2: http://h2database.com/
.. _JDBC: http://en.wikipedia.org/wiki/Java_Database_Connectivity
.. _Typesafe Subscription Agreement: http://typesafe.com/public/legal/TypesafeSubscriptionAgreement-v1.pdf
.. _ScalaQuery: http://scalaquery.org
