
Require Java 8 and Scala 2.11

- Slick 3.2 will require Java 8 and Scala 2.11.

- Remove all warnings from the build, including ones not directly
  related to the dependency changes.

- Upgrade OSGi test environment (with plenty of follow-on changes in the
  `osgitests` project) because the old versions were incompatible with
  Java 8.

- Adapt macro usage to the 2.11 API. The only major change is in the
  `StaticDatabaseConfig` annotation lookup from the enclosing scope.
  `HList`, `Nat` and `tsql` require whitebox macros; all other macros
  are converted to blackbox (see the sketch after this list).
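
A minimal sketch of the Scala 2.11 macro API this commit migrates to, assuming a standalone demo object: `blackbox.Context` replaces the old `scala.reflect.macros.Context`, `TermName` and quasiquotes replace `newTermName`-based tree building, and the deprecated `c.enclosingMethod`/`c.enclosingClass` lookups give way to a walk over `c.internal.enclosingOwner`. The names `DemoMacros` and `annotationNames` are invented for illustration; the commit's real code is in `StaticDatabaseConfigMacros` and `MacroSupportInterpolationImpl`.

```scala
import scala.language.experimental.macros
import scala.reflect.macros.blackbox // use `whitebox.Context` only where the macro must refine its result type

// Hypothetical demo object, not part of the Slick code base.
object DemoMacros {
  /** Expands to the string forms of all annotations found on the enclosing owners. */
  def annotationNames: List[String] = macro annotationNamesImpl

  def annotationNamesImpl(c: blackbox.Context): c.Expr[List[String]] = {
    import c.universe._
    // 2.11 idiom: walk the owner chain instead of the deprecated
    // c.enclosingMethod / c.enclosingClass, mirroring the new
    // StaticDatabaseConfig annotation lookup in this commit.
    val owners = Iterator.iterate(c.internal.enclosingOwner)(_.owner).takeWhile(_ != NoSymbol)
    val names = owners.flatMap(_.annotations).map(_.toString).toList
    c.Expr[List[String]](q"$names")
  }
}
```

Whitebox macros keep the ability to report a more precise result type to the caller than the declared one, which is why `HList`, `Nat` and `tsql` stay whitebox while everything else becomes blackbox.
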
1 parent 85f3eba commit 20aed4cab2c510c9b1baf6ebcc974df95ffd191f @szeiger committed Sep 29, 2015
@@ -1,7 +1,7 @@
language: scala
script: sbt -jvm-opts jvmopts.travis -Dslick.testkit-config=test-dbs/testkit.travis.conf +testAll
jdk:
- - openjdk6
+ - oraclejdk8
notifications:
flowdock:
secure: j3YP9TjiIcMRy2mvunF1AHBOFnz2H7mZAFVbHPBNkAjMCwSdBNvLpn33qv6ybr02c5snBDJTs0P70RJ/mh3YsqwnIeloQD9HUfnndKQD6ujxx1QWRI/lVDW4pfVRQEuPsXdW/3AiqxrSG5BS4thiyc3vj3LpnodHwNMUT+Nlmq0=
@@ -1,5 +1,4 @@
-Xmx3076M
--XX:MaxPermSize=1024M
-Xss2M
-XX:+DoEscapeAnalysis
-XX:+UseParallelGC
@@ -4,33 +4,31 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert._
import org.ops4j.pax.exam
-import org.ops4j.pax.exam.junit.{Configuration, ExamReactorStrategy, JUnit4TestRunner}
-import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.exam.Configuration
+import org.ops4j.pax.exam.junit.PaxExam
+import org.ops4j.pax.exam.spi.reactors.{AllConfinedStagedReactorFactory, ExamReactorStrategy}
import slick.osgi.testutil._
import slick.util.GlobalConfig
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits.global
-@RunWith(classOf[JUnit4TestRunner])
+@RunWith(classOf[PaxExam])
@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
class BasicTest extends SlickOsgiHelper {
-
- @Configuration
- def config(): Array[exam.Option] = {
- standardOptions
- }
+ @Configuration def config() = standardOptions
@Test
- def testPlainSQL: Unit = {
+ def testPlainSQL: Unit = wrap {
import slick.driver.H2Driver.api._
val a = sql"select {fn database()}".as[String].head.map(res => assertEquals("TEST-OSGI", res))
val db = Database.forURL("jdbc:h2:mem:test-osgi")
try Await.result(db.run(a), Duration.Inf) finally db.close
}
+
@Test
- def testConfig: Unit = {
+ def testConfig: Unit = wrap {
assertFalse(GlobalConfig.driverConfig("MySQL").isEmpty)
}
}
@@ -2,7 +2,9 @@ package slick.osgi.testutil
import org.ops4j.pax.exam.CoreOptions._
import org.ops4j.pax.exam
-import java.io.File
+import java.io.{StringWriter, PrintWriter, File}
+
+import scala.util.control.NonFatal
/**
* Helper to communicate with promoted bundles from our sbtbuild.
@@ -11,14 +13,16 @@ trait SlickOsgiHelper {
private def makeBundle(file: File): exam.Option =
bundle(file.toURI.toASCIIString)
-
private def allBundleFiles: Array[File] =
Option(sys.props("slick.osgi.bundlepath")).getOrElse("").split(":").map(new File(_))
- def standardOptions: Array[exam.Option] = {
- val bundles = (allBundleFiles map makeBundle)
- bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
- // to change the local repo used (for some operations, but not all -- which is why I didn't bother):
- // systemProperty("org.ops4j.pax.url.mvn.localRepository").value(sys.props("maven.repo.local")))
- }
+ def standardOptions: Array[exam.Option] =
+ allBundleFiles.map(makeBundle) ++ Array[exam.Option](junitBundles())
+
+ def wrap(f: => Unit): Unit =
+ try f catch { case NonFatal(t) =>
+ // Force all classes required for printing the exception to be loaded before the bundles are released
+ t.printStackTrace(new PrintWriter(new StringWriter()))
+ throw t
+ }
}
@@ -13,10 +13,10 @@ import de.johoop.testngplugin.TestNGPlugin._
object SlickBuild extends Build {
- val slickVersion = "3.1.0-RC3"
- val slickExtensionsVersion = "3.1.0-RC2" // Slick extensions version for links in the manual
- val binaryCompatSlickVersion = "3.1.0" // Slick base version for binary compatibility checks
- val scalaVersions = Seq("2.10.5", "2.11.6")
+ val slickVersion = "3.2.0-SNAPSHOT"
+ val slickExtensionsVersion = slickVersion // Slick extensions version for links in the manual
+ val binaryCompatSlickVersion = "3.2.0" // Slick base version for binary compatibility checks
+ val scalaVersions = Seq("2.11.7")
/** Dependencies for reuse in different parts of the build */
object Dependencies {
@@ -44,14 +44,14 @@ object SlickBuild extends Build {
"postgresql" % "postgresql" % "9.1-901.jdbc4",
"mysql" % "mysql-connector-java" % "5.1.23"
)
- val paxExamVersion = "2.6.0"
+ val paxExamVersion = "4.6.0"
val paxExam = Seq(
"org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion,
"org.ops4j.pax.exam" % "pax-exam-junit4" % paxExamVersion,
"org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion,
"org.ops4j.pax.url" % "pax-url-aether" % "1.6.0",
"org.ops4j.pax.swissbox" % "pax-swissbox-framework" % "1.5.1",
- "org.apache.felix" % "org.apache.felix.framework" % "3.2.2"
+ "org.apache.felix" % "org.apache.felix.framework" % "4.6.1"
)
}
@@ -99,7 +99,7 @@ object SlickBuild extends Build {
organizationName := "Typesafe",
organization := "com.typesafe.slick",
resolvers += Resolver.sonatypeRepo("snapshots"),
- scalacOptions ++= List("-deprecation", "-feature"),
+ scalacOptions ++= List("-deprecation", "-feature", "-unchecked"),
scalacOptions in (Compile, doc) <++= (version,sourceDirectory in Compile,name).map((v,src,n) => Seq(
"-doc-title", n,
"-doc-version", v,
@@ -39,10 +39,7 @@ object HikariCPJdbcDataSource extends JdbcDataSourceFactory {
hconf.setMaxLifetime(c.getMillisecondsOr("maxLifetime", 1800000))
hconf.setLeakDetectionThreshold(c.getMillisecondsOr("leakDetectionThreshold", 0))
hconf.setInitializationFailFast(c.getBooleanOr("initializationFailFast", false))
- c.getStringOpt("connectionTestQuery").foreach { s =>
- hconf.setJdbc4ConnectionTest(false)
- hconf.setConnectionTestQuery(s)
- }
+ c.getStringOpt("connectionTestQuery").foreach(hconf.setConnectionTestQuery)
c.getStringOpt("connectionInitSql").foreach(hconf.setConnectionInitSql)
val numThreads = c.getIntOr("numThreads", 20)
hconf.setMaximumPoolSize(c.getIntOr("maxConnections", numThreads * 5))
@@ -105,7 +105,7 @@ class ActionTest extends AsyncTest[RelationalTestDB] {
a1.map(_ shouldBe (1 to 5000).toSeq),
a2.map(_ shouldBe (1 to 20).toSeq),
a3.map(_ shouldBe (1 to 20).toSeq),
- a4.map(_ shouldBe ()),
+ a4.map(_ shouldBe (())),
a5.map(_ shouldBe "a5")
)
} else DBIO.successful(())
@@ -235,7 +235,7 @@ class JdbcMapperTest extends AsyncTest[JdbcTestDB] {
def productArity: Int = 2
def productElement(n: Int): Any = Seq(a, b)(n)
override def equals(a: Any) = a match {
- case that: C => this.a == that.a && this.b == that.b
+ case that: LiftedC => this.a == that.a && this.b == that.b
case _ => false
}
}
@@ -1,7 +1,8 @@
package com.typesafe.slick.testkit.util
import scala.language.experimental.macros
-import scala.reflect.macros.{Context, TypecheckException}
+import scala.reflect.macros.blackbox.Context
+import scala.reflect.macros.TypecheckException
import scala.util.control.NonFatal
import java.util.regex.Pattern
@@ -24,7 +25,7 @@ object ShouldNotTypecheck {
(Pattern.compile(s, Pattern.CASE_INSENSITIVE | Pattern.DOTALL), "Expected error matching: "+s)
}
- try ctx.typeCheck(ctx.parse("{ "+codeStr+" }")) catch { case e: TypecheckException =>
+ try ctx.typecheck(ctx.parse("{ "+codeStr+" }")) catch { case e: TypecheckException =>
val msg = e.getMessage
if((expected ne null) && !(expPat.matcher(msg)).matches)
ctx.abort(ctx.enclosingPosition, "Type-checking failed in an unexpected way.\n"+
@@ -1,6 +1,6 @@
package slick.util
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
import scala.reflect.NameTransformer
import scala.collection.mutable.ListBuffer
@@ -13,12 +13,12 @@ object MacroSupportInterpolationImpl {
val stringType = definitions.StringClass.toType
val symbolType = ctx.mirror.staticClass("slick.ast.Symbol").toType
- val skipParens = Ident(newTermName("skipParens"))
- val sqlBuilder = Ident(newTermName("sqlBuilder"))
- def quoteIdentifier(t: Tree) = Apply(Ident(newTermName("quoteIdentifier")), List(t))
- def symbolName(t: Tree) = Apply(Ident(newTermName("symbolName")), List(t))
- def toStr(t: Tree) = Apply(Select(Ident(definitions.StringClass.companionSymbol), newTermName("valueOf")), List(t))
- def append(t: Tree) = Apply(Select(sqlBuilder, newTermName("+=").encodedName), List(t))
+ val skipParens = Ident(TermName("skipParens"))
+ val sqlBuilder = Ident(TermName("sqlBuilder"))
+ def quoteIdentifier(t: Tree) = Apply(Ident(TermName("quoteIdentifier")), List(t))
+ def symbolName(t: Tree) = Apply(Ident(TermName("symbolName")), List(t))
+ def toStr(t: Tree) = Apply(Select(Ident(definitions.StringClass.companion), TermName("valueOf")), List(t))
+ def append(t: Tree) = Apply(Select(sqlBuilder, TermName("+=").encodedName), List(t))
def appendString(str: String): List[Tree] = {
val exprs = new ListBuffer[Tree]
@@ -38,41 +38,41 @@ object MacroSupportInterpolationImpl {
case c2 @ ('(' | ')') => // optional parentheses
flushSB
exprs += If(
- Select(skipParens, newTermName(NameTransformer.encode("unary_!"))),
+ Select(skipParens, TermName(NameTransformer.encode("unary_!"))),
append(Literal(Constant(c2))),
ctx.universe.EmptyTree
)
case '{' => // optional open parentheses with indent
flushSB
exprs += If(
- Select(skipParens, newTermName(NameTransformer.encode("unary_!"))),
+ Select(skipParens, TermName(NameTransformer.encode("unary_!"))),
Block(List(
append(Literal(Constant('('))),
- Select(sqlBuilder, newTermName("newLineIndent"))
+ Select(sqlBuilder, TermName("newLineIndent"))
), Literal(Constant(()))),
ctx.universe.EmptyTree
)
case '}' => // optional close parentheses with dedent
flushSB
exprs += If(
- Select(skipParens, newTermName(NameTransformer.encode("unary_!"))),
+ Select(skipParens, TermName(NameTransformer.encode("unary_!"))),
Block(List(
- Select(sqlBuilder, newTermName("newLineDedent")),
+ Select(sqlBuilder, TermName("newLineDedent")),
append(Literal(Constant(')')))
), Literal(Constant(()))),
ctx.universe.EmptyTree
)
case '[' => // open parenthesis with indent
sb append '('
flushSB
- exprs += Select(sqlBuilder, newTermName("newLineIndent"))
+ exprs += Select(sqlBuilder, TermName("newLineIndent"))
case ']' => // close parenthesis with dedent
flushSB
- exprs += Select(sqlBuilder, newTermName("newLineDedent"))
+ exprs += Select(sqlBuilder, TermName("newLineDedent"))
sb append ')'
case 'n' =>
flushSB
- exprs += Select(sqlBuilder, newTermName("newLineOrSpace"))
+ exprs += Select(sqlBuilder, TermName("newLineOrSpace"))
case c2 =>
ctx.abort(ctx.enclosingPosition, "Invalid escaped character '"+c2+"' in literal \""+str+"\"")
}
@@ -107,7 +107,7 @@ object MacroSupportInterpolationImpl {
ctx.abort(ae.tree.pos, "Unknown type. Must be Node or Symbol.")
case '!' =>
exprs ++= appendString(s.substring(0, len-1))
- exprs += Apply(Ident(newTermName("expr")), List(a, Literal(Constant(true))))
+ exprs += Apply(Ident(TermName("expr")), List(a, Literal(Constant(true))))
case _ =>
exprs ++= appendString(s)
//println("### tpe: "+ae.actualType)
@@ -119,7 +119,7 @@ object MacroSupportInterpolationImpl {
else if(ae.actualType <:< definitions.AnyValTpe)
append(toStr(a))
else if(ae.actualType <:< nodeType)
- Apply(Ident(newTermName("expr")), List(a, Literal(Constant(false))))
+ Apply(Ident(TermName("expr")), List(a, Literal(Constant(false))))
else
ctx.abort(ae.tree.pos, "Unknown type. Must be Node, String or AnyVal.")
)
@@ -563,6 +563,7 @@ object Path {
def apply(l: List[TermSymbol]): PathElement = l match {
case s :: Nil => Ref(s)
case s :: l => Select(apply(l), s)
+ case _ => throw new SlickException("Empty Path")
}
def unapply(n: PathElement): Option[List[TermSymbol]] = {
var l = new ListBuffer[TermSymbol]
@@ -231,7 +231,7 @@ trait DatabaseComponent { self =>
def highPriority = highPrio
def run: Unit =
try {
- ctx.sync
+ ctx.readSync
val res = try {
acquireSession(ctx)
val res = try a.run(ctx) catch { case NonFatal(ex) =>
@@ -264,7 +264,7 @@ trait DatabaseComponent { self =>
def run: Unit = try {
val debug = streamLogger.isDebugEnabled
var state = initialState
- ctx.sync
+ ctx.readSync
if(state eq null) acquireSession(ctx)
var demand = ctx.demandBatch
var realDemand = if(demand < 0) demand - Long.MinValue else demand
@@ -364,6 +364,8 @@ trait DatabaseComponent { self =>
* synchronous execution. */
@volatile private[DatabaseComponent] var sync = 0
+ private[DatabaseComponent] def readSync = sync // workaround for SI-9053 to avoid warnings
+
private[DatabaseComponent] var currentSession: Session = null
/** Used for the sequence counter in Action debug output. This variable is volatile because it
@@ -440,7 +442,7 @@ trait DatabaseComponent { self =>
/** Restart a suspended streaming action. Must only be called from the Subscriber context. */
def restartStreaming: Unit = {
- sync
+ readSync
val s = streamState
if(s ne null) {
streamState = null
@@ -7,7 +7,7 @@ import scala.language.experimental.macros
import java.net.{URL, URI}
import scala.annotation.{StaticAnnotation, Annotation}
import scala.reflect.ClassTag
-import scala.reflect.macros.Context
+import scala.reflect.macros.blackbox.Context
import scala.util.control.NonFatal
import slick.SlickException
import slick.profile.BasicProfile
@@ -118,16 +118,14 @@ object StaticDatabaseConfigMacros {
private[slick] def getURI(c: Context): String = {
import c.universe._
- def findUri(ann: Seq[Tree]): Option[String] =
- ann.map(c.typeCheck(_, pt = weakTypeOf[StaticDatabaseConfig], silent = true)).collectFirst {
+ def findUri(ann: Seq[c.universe.Annotation]): Option[String] =
+ ann.map(a => c.typecheck(a.tree, pt = weakTypeOf[StaticDatabaseConfig], silent = true)).collectFirst {
case Apply(Select(_, _), List(Literal(Constant(uri: String)))) => uri
}
- val methConf = Option(c.enclosingMethod).filter(_ != EmptyTree).map(_.asInstanceOf[MemberDef])
- .flatMap(md => findUri(md.mods.annotations))
- val classConf = findUri(c.enclosingClass.asInstanceOf[MemberDef].mods.annotations)
- methConf.orElse(classConf).getOrElse(
- c.abort(c.enclosingPosition, "No @StaticDatabaseConfig annotation found in enclosing scope"))
+ val scopes = Iterator.iterate(c.internal.enclosingOwner)(_.owner).takeWhile(_ != NoSymbol)
+ val uriOpt = scopes.map(s => findUri(s.annotations)).find(_.isDefined).flatten
+ uriOpt.getOrElse(c.abort(c.enclosingPosition, "No @StaticDatabaseConfig annotation found in enclosing scope"))
}
def getImpl[P <: BasicProfile : c.WeakTypeTag](c: Context)(ct: c.Expr[ClassTag[P]]): c.Expr[DatabaseConfig[P]] = {
@@ -3,7 +3,7 @@ package slick.collection.heterogeneous
import scala.language.higherKinds
import scala.language.experimental.macros
import scala.annotation.unchecked.{uncheckedVariance => uv}
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
import slick.lifted.{MappedScalaProductShape, Shape, ShapeLevel}
import scala.reflect.ClassTag
@@ -149,7 +149,7 @@ final object HListMacros{
ctx.Expr(
Apply(
TypeApply(
- Select(ctx.prefix.tree, newTermName("_unsafeApply")),
+ Select(ctx.prefix.tree, TermName("_unsafeApply")),
List(tt)
),
List(t)
@@ -2,7 +2,7 @@ package slick.collection.heterogeneous
import scala.language.higherKinds
import scala.language.experimental.macros
-import scala.reflect.macros.Context
+import scala.reflect.macros.whitebox.Context
/** Natural numbers for indexing in HLists.
*
@@ -140,7 +140,7 @@ object Nat {
def apply(i: Int): Nat = macro Nat.applyImpl
def applyImpl(ctx: Context)(i: ctx.Expr[Int]): ctx.Expr[Nat] = {
import ctx.universe._
- val _Nat = typeOf[Nat.type].typeSymbol.companionSymbol
+ val _Nat = typeOf[Nat].typeSymbol.companion
val _Succ = typeOf[Succ[_]].typeSymbol
val _Zero = reify(Zero).tree
@@ -152,7 +152,7 @@ object Nat {
ctx.Expr(
Apply(
TypeApply(
- Select(Ident(_Nat), newTermName("_unsafe")),
+ Select(Ident(_Nat), TermName("_unsafe")),
List(tt)),
List(Literal(Constant(v)))))
case _ => reify(Nat._unsafe[Nat](i.splice))

1 comment on commit 20aed4c

@brettwooldridge

@szeiger Just so you Slick guys know, HikariCP is basically waiting on you (Slick 3.2) to drop JDK 7 support from the mainline.
