Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Complete supporting Scala 2.13.0-M4 #887

Merged
merged 1 commit into from
May 27, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 3 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,7 @@ matrix:
env: SCALIKEJDBC_DATABASE="h2"
- scala: 2.12.6
env: SCALIKEJDBC_DATABASE="hsqldb"
- scala: 2.13.0-M3
- scala: 2.13.0-M4
env: SCALIKEJDBC_DATABASE="mysql"
script:
- sbt "++ ${TRAVIS_SCALA_VERSION}! -v" config/compile syntax-support-macro/compile streams/compile mapper-generator-core/compile
52 changes: 34 additions & 18 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,11 @@ lazy val baseSettings = Seq(
publishTo := _publishTo(version.value),
publishMavenStyle := true,
resolvers ++= _resolvers,
resolvers ++= PartialFunction.condOpt(CrossVersion.partialVersion(scalaVersion.value)) {
case Some((2, 13)) =>
// TODO remove when parser-combinators available maven central https://github.com/scala/scala-parser-combinators/issues/151
"staging" at "https://oss.sonatype.org/content/repositories/staging"
}.toList,
// https://github.com/sbt/sbt/issues/2217
fullResolvers ~= { _.filterNot(_.name == "jcenter") },
transitiveClassifiers in Global := Seq(Artifact.SourceClassifier),
Expand Down Expand Up @@ -111,7 +116,7 @@ lazy val scalikejdbcJodaTime = Project(
baseSettings,
mimaSettings,
name := "scalikejdbc-joda-time",
libraryDependencies ++= scalaTestDependenciesInTestScope(scalatestVersion.value),
libraryDependencies ++= scalaTestDependenciesInTestScope.value,
libraryDependencies ++= Seq(
"org.mockito" % "mockito-core" % mockitoVersion % "test",
"joda-time" % "joda-time" % "2.9.9",
Expand All @@ -131,7 +136,7 @@ lazy val scalikejdbcLibrary = Project(
baseSettings,
mimaSettings,
name := "scalikejdbc",
libraryDependencies ++= scalaTestDependenciesInTestScope(scalatestVersion.value) ++
libraryDependencies ++= scalaTestDependenciesInTestScope.value ++
Seq("com.h2database" % "h2" % _h2Version % "test")
).dependsOn(scalikejdbcCore, scalikejdbcInterpolation).disablePlugins(ScriptedPlugin)

Expand Down Expand Up @@ -166,6 +171,7 @@ lazy val scalikejdbcCore = Project(
"org.apache.commons" % "commons-dbcp2" % "2.3.0" % "compile",
"org.slf4j" % "slf4j-api" % _slf4jApiVersion % "compile",
"org.scala-lang.modules" %% "scala-parser-combinators" % parserCombinatorsVersion.value % "compile",
"org.scala-lang.modules" %% "scala-collection-compat" % "0.1.1",
// scope: provided
"commons-dbcp" % "commons-dbcp" % "1.4" % "provided",
"com.jolbox" % "bonecp" % "0.8.0.RELEASE" % "provided",
Expand All @@ -174,7 +180,7 @@ lazy val scalikejdbcCore = Project(
"ch.qos.logback" % "logback-classic" % _logbackVersion % "test",
"org.hibernate" % "hibernate-core" % _hibernateVersion % "test",
"org.mockito" % "mockito-core" % mockitoVersion % "test"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value) ++ jdbcDriverDependenciesInTestScope
) ++ scalaTestDependenciesInTestScope.value ++ jdbcDriverDependenciesInTestScope
}
).enablePlugins(BuildInfoPlugin).disablePlugins(ScriptedPlugin)

Expand All @@ -190,7 +196,7 @@ lazy val scalikejdbcInterpolationMacro = Project(
Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value % "compile",
"org.scala-lang" % "scala-compiler" % scalaVersion.value % "optional"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value)
) ++ scalaTestDependenciesInTestScope.value
}
).dependsOn(scalikejdbcCore).disablePlugins(ScriptedPlugin)

Expand All @@ -207,7 +213,7 @@ lazy val scalikejdbcInterpolation = Project(
"org.slf4j" % "slf4j-api" % _slf4jApiVersion % "compile",
"ch.qos.logback" % "logback-classic" % _logbackVersion % "test",
"org.hibernate" % "hibernate-core" % _hibernateVersion % "test"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value) ++ jdbcDriverDependenciesInTestScope
) ++ scalaTestDependenciesInTestScope.value ++ jdbcDriverDependenciesInTestScope
}
).dependsOn(scalikejdbcCore, scalikejdbcInterpolationMacro).disablePlugins(ScriptedPlugin)

Expand All @@ -222,7 +228,7 @@ lazy val scalikejdbcMapperGeneratorCore = Project(
name := "scalikejdbc-mapper-generator-core",
libraryDependencies ++= {
Seq("org.slf4j" % "slf4j-api" % _slf4jApiVersion % "compile") ++
scalaTestDependenciesInTestScope(scalatestVersion.value) ++
scalaTestDependenciesInTestScope.value ++
jdbcDriverDependenciesInTestScope
}
).dependsOn(scalikejdbcLibrary).disablePlugins(ScriptedPlugin)
Expand Down Expand Up @@ -257,8 +263,8 @@ lazy val scalikejdbcMapperGenerator = Project(
name := "scalikejdbc-mapper-generator",
libraryDependencies ++= {
Seq("org.slf4j" % "slf4j-simple" % _slf4jApiVersion % "compile") ++
scalaTestDependenciesInTestScope(scalatestVersion.value) ++
specs2DependenciesInTestScope(specs2Version.value) ++
scalaTestDependenciesInTestScope.value ++
specs2DependenciesInTestScope.value ++
jdbcDriverDependenciesInTestScope
}
).dependsOn(scalikejdbcCore, scalikejdbcMapperGeneratorCore)
Expand Down Expand Up @@ -296,7 +302,7 @@ lazy val scalikejdbcConfig = Project(
"com.typesafe" % "config" % _typesafeConfigVersion % "compile",
"org.slf4j" % "slf4j-api" % _slf4jApiVersion % "compile",
"ch.qos.logback" % "logback-classic" % _logbackVersion % "test"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value) ++ jdbcDriverDependenciesInTestScope
) ++ scalaTestDependenciesInTestScope.value ++ jdbcDriverDependenciesInTestScope
}
).dependsOn(scalikejdbcCore).disablePlugins(ScriptedPlugin)

Expand All @@ -315,7 +321,7 @@ lazy val scalikejdbcStreams = Project(
"ch.qos.logback" % "logback-classic" % _logbackVersion % "test",
"org.reactivestreams" % "reactive-streams-tck" % _reactiveStreamsVersion % "test",
"org.reactivestreams" % "reactive-streams-examples" % _reactiveStreamsVersion % "test"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value) ++ jdbcDriverDependenciesInTestScope
) ++ scalaTestDependenciesInTestScope.value ++ jdbcDriverDependenciesInTestScope
},
unmanagedSourceDirectories in Compile += {
CrossVersion.partialVersion(scalaVersion.value) match {
Expand All @@ -338,7 +344,7 @@ lazy val scalikejdbcSyntaxSupportMacro = Project(
Seq(
"ch.qos.logback" % "logback-classic" % _logbackVersion % "test",
"org.hibernate" % "hibernate-core" % _hibernateVersion % "test"
) ++ scalaTestDependenciesInTestScope(scalatestVersion.value) ++ jdbcDriverDependenciesInTestScope
) ++ scalaTestDependenciesInTestScope.value ++ jdbcDriverDependenciesInTestScope
}
).dependsOn(scalikejdbcLibrary).disablePlugins(ScriptedPlugin)

Expand All @@ -352,15 +358,25 @@ val _resolvers = Seq(
"sonatype releases" at "https://oss.sonatype.org/content/repositories/releases",
"sonatype snaphots" at "https://oss.sonatype.org/content/repositories/snapshots"
)
def scalaTestDependenciesInTestScope(v: String) =
Seq("org.scalatest" %% "scalatest" % v % "test")
// ScalaTest test-scope dependency, resolved per Scala version.
// For Scala 2.13.0-M4 no ScalaTest artifact is published yet, so the
// dependency list is empty there; every other version gets scalatest % "test".
lazy val scalaTestDependenciesInTestScope = Def.setting {
  scalaVersion.value match {
    case "2.13.0-M4" =>
      // TODO https://github.com/scalatest/scalatest/issues/1367
      Nil
    case _ =>
      Seq("org.scalatest" %% "scalatest" % scalatestVersion.value % "test")
  }
}

def specs2DependenciesInTestScope(v: String) =
Seq(
("org.specs2" %% "specs2-core" % v % "test").excludeAll(
ExclusionRule(organization = "org.spire-math")
// specs2 test-scope dependency, resolved per Scala version.
// For Scala 2.13.0-M4 specs2 is not yet available, so the dependency list is
// empty there; every other version gets specs2-core % "test".
//
// NOTE: the rendered diff interleaved a stray ')' from the deleted old
// definition, leaving the braces/parens unbalanced; this is the well-formed
// new-side definition with matched delimiters.
lazy val specs2DependenciesInTestScope = Def.setting {
  if (scalaVersion.value == "2.13.0-M4") {
    // TODO specs2 for Scala 2.13
    Nil
  } else {
    Seq(
      "org.specs2" %% "specs2-core" % specs2Version.value % "test"
    )
  }
}

val jdbcDriverDependenciesInTestScope = Seq(
"com.h2database" % "h2" % _h2Version % "test",
Expand Down
8 changes: 4 additions & 4 deletions project/GenerateOneToManies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ s"""/*
package scalikejdbc

import scala.collection.mutable.LinkedHashMap
import scala.collection.generic.CanBuildFrom
import scala.collection.compat._
import scala.language.higherKinds

private[scalikejdbc] trait OneToManies${n}Extractor[$A, $bs, E <: WithExtractor, Z]
Expand Down Expand Up @@ -74,7 +74,7 @@ s" to$i.map(t => Vector(t)).getOrElse(Vector.empty)"

private[scalikejdbc] def toTraversable(session: DBSession, sql: String, params: scala.collection.Seq[_], zExtractor: (A, $seq) => Z): Traversable[Z] = {
val attributesSwitcher = createDBSessionAttributesSwitcher()
DBSessionWrapper(session, attributesSwitcher).foldLeft(statement, rawParameters.toSeq: _*)(LinkedHashMap[A, ($seq)]())(processResultSet).map {
DBSessionWrapper(session, attributesSwitcher).foldLeft(statement, rawParameters.toSeq: _*)(LinkedHashMap[A, ($seq)]())(processResultSet _).map {
case (one, (${(1 to n).map("t" + _).mkString(", ")})) => zExtractor(one, ${(1 to n).map("t" + _).mkString(", ")})
}
}
Expand Down Expand Up @@ -154,8 +154,8 @@ final class OneToManies${n}SQLToCollection[A, $bs, E <: WithExtractor, Z] privat

import GeneralizedTypeConstraintsForWithExtractor._

override def apply[C[_]]()(implicit session: DBSession, context: ConnectionPoolContext = NoConnectionPoolContext, hasExtractor: ThisSQL =:= SQLWithExtractor, cbf: CanBuildFrom[Nothing, Z, C[Z]]): C[Z] = {
executeQuery(session, (session: DBSession) => toTraversable(session, statement, rawParameters, zExtractor).to[C])
override def apply[C[_]]()(implicit session: DBSession, context: ConnectionPoolContext = NoConnectionPoolContext, hasExtractor: ThisSQL =:= SQLWithExtractor, f: Factory[Z, C[Z]]): C[Z] = {
executeQuery(session, (session: DBSession) => f.fromSpecific(toTraversable(session, statement, rawParameters, zExtractor)))
}

$extractOne
Expand Down
28 changes: 14 additions & 14 deletions scalikejdbc-core/src/main/scala/scalikejdbc/DBSession.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ package scalikejdbc

import java.sql._
import util.control.Exception._
import scala.collection.generic.CanBuildFrom
import scala.collection.compat._
import scala.language.higherKinds

/**
Expand Down Expand Up @@ -305,11 +305,11 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
* @tparam C return collection type
* @return result as C[A]
*/
def collection[A, C[_]](template: String, params: Any*)(extract: WrappedResultSet => A)(implicit cbf: CanBuildFrom[Nothing, A, C[A]]): C[A] = {
def collection[A, C[_]](template: String, params: Any*)(extract: WrappedResultSet => A)(implicit f: Factory[A, C[A]]): C[A] = {
using(createStatementExecutor(conn, template, params)) {
executor =>
val proxy = new DBConnectionAttributesWiredResultSet(executor.executeQuery(), connectionAttributes)
new ResultSetIterator(proxy).map(extract).to[C]
f.fromSpecific(new ResultSetIterator(proxy).map(extract))
}
}

Expand Down Expand Up @@ -671,7 +671,7 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
* @param paramsList list of parameters
* @return count list
*/
def batch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit cbf: CanBuildFrom[Nothing, Int, C[Int]]): C[Int] = {
def batch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit f: Factory[Int, C[Int]]): C[Int] = {
batchInternal[C, Int](
template = template,
paramsList = paramsList,
Expand All @@ -684,7 +684,7 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
* @param paramsList list of parameters
* @return count list
*/
def largeBatch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit cbf: CanBuildFrom[Nothing, Long, C[Long]]): C[Long] =
def largeBatch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit f: Factory[Long, C[Long]]): C[Long] =
batchInternal[C, Long](
template = template,
paramsList = paramsList,
Expand All @@ -693,10 +693,10 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
private[this] def batchInternal[C[_], A](
template: String,
paramsList: scala.collection.Seq[scala.collection.Seq[Any]],
execute: StatementExecutor => scala.Array[A])(implicit cbf: CanBuildFrom[Nothing, A, C[A]]): C[A] = {
execute: StatementExecutor => scala.Array[A])(implicit f: Factory[A, C[A]]): C[A] = {
ensureNotReadOnlySession(template)
paramsList match {
case Nil => Seq.empty[A].to[C]
case Nil => f.fromSpecific(Seq.empty[A])
case _ =>
using(createBatchStatementExecutor(
conn = conn,
Expand All @@ -708,7 +708,7 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
executor.bindParams(params)
executor.addBatch()
}
execute(executor).to[C]
f.fromSpecific(execute(executor))
}
}
}
Expand All @@ -722,10 +722,10 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
*/
def batchAndReturnGeneratedKey[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(
implicit
cbf: CanBuildFrom[Nothing, Long, C[Long]]): C[Long] = {
f: Factory[Long, C[Long]]): C[Long] = {
ensureNotReadOnlySession(template)
paramsList match {
case Nil => Seq.empty[Long].to[C]
case Nil => f.fromSpecific(Seq.empty[Long])
case _ =>
using(createBatchStatementExecutor(
conn = conn,
Expand All @@ -738,7 +738,7 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
executor.addBatch()
}
executor.executeBatch()
new ResultSetIterator(executor.generatedKeysResultSet).map(_.long(1)).to[C]
f.fromSpecific(new ResultSetIterator(executor.generatedKeysResultSet).map(_.long(1)))
}
}
}
Expand All @@ -753,10 +753,10 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
*/
def batchAndReturnSpecifiedGeneratedKey[C[_]](template: String, key: String, paramsList: scala.collection.Seq[Any]*)(
implicit
cbf: CanBuildFrom[Nothing, Long, C[Long]]): C[Long] = {
f: Factory[Long, C[Long]]): C[Long] = {
ensureNotReadOnlySession(template)
paramsList match {
case Nil => Seq.empty[Long].to[C]
case Nil => f.fromSpecific(Seq.empty[Long])
case _ =>
using(createBatchStatementExecutor(
conn = conn,
Expand All @@ -769,7 +769,7 @@ trait DBSession extends LogSupport with LoanPattern with AutoCloseable {
executor.addBatch()
}
executor.executeBatch()
new ResultSetIterator(executor.generatedKeysResultSet).map(_.long(key)).to[C]
f.fromSpecific(new ResultSetIterator(executor.generatedKeysResultSet).map(_.long(key)))
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ package scalikejdbc

import java.sql.PreparedStatement

import scala.collection.generic.CanBuildFrom
import scala.collection.compat._
import scala.language.higherKinds

/**
Expand Down Expand Up @@ -51,7 +51,7 @@ private[scalikejdbc] final class DBSessionWrapper(
override def list[A](template: String, params: Any*)(extract: (WrappedResultSet) => A): List[A] = {
withAttributesSwitchedDBSession(_.list(template, params: _*)(extract))
}
override def collection[A, C[_]](template: String, params: Any*)(extract: (WrappedResultSet) => A)(implicit cbf: CanBuildFrom[Nothing, A, C[A]]): C[A] = {
override def collection[A, C[_]](template: String, params: Any*)(extract: (WrappedResultSet) => A)(implicit f: Factory[A, C[A]]): C[A] = {
withAttributesSwitchedDBSession(_.collection(template, params: _*)(extract))
}
override def foreach(template: String, params: Any*)(f: (WrappedResultSet) => Unit): Unit = {
Expand Down Expand Up @@ -91,13 +91,13 @@ private[scalikejdbc] final class DBSessionWrapper(
override def updateAndReturnSpecifiedGeneratedKey(template: String, params: Any*)(key: Any): Long = {
withAttributesSwitchedDBSession(_.updateAndReturnSpecifiedGeneratedKey(template, params: _*)(key))
}
override def batch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit cbf: CanBuildFrom[Nothing, Int, C[Int]]): C[Int] = {
override def batch[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit f: Factory[Int, C[Int]]): C[Int] = {
withAttributesSwitchedDBSession(_.batch(template, paramsList: _*))
}
override def batchAndReturnGeneratedKey[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit cbf: CanBuildFrom[Nothing, Long, C[Long]]): C[Long] = {
override def batchAndReturnGeneratedKey[C[_]](template: String, paramsList: scala.collection.Seq[Any]*)(implicit f: Factory[Long, C[Long]]): C[Long] = {
withAttributesSwitchedDBSession(_.batchAndReturnGeneratedKey(template, paramsList: _*))
}
override def batchAndReturnSpecifiedGeneratedKey[C[_]](template: String, key: String, paramsList: scala.collection.Seq[Any]*)(implicit cbf: CanBuildFrom[Nothing, Long, C[Long]]): C[Long] = {
override def batchAndReturnSpecifiedGeneratedKey[C[_]](template: String, key: String, paramsList: scala.collection.Seq[Any]*)(implicit f: Factory[Long, C[Long]]): C[Long] = {
withAttributesSwitchedDBSession(_.batchAndReturnSpecifiedGeneratedKey(template, key, paramsList: _*))
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package scalikejdbc

import scala.collection.mutable.LinkedHashMap
import scala.collection.generic.CanBuildFrom
import scala.collection.compat._
import scala.language.higherKinds

private[scalikejdbc] trait OneToManies2Extractor[A, B1, B2, E <: WithExtractor, Z]
Expand Down Expand Up @@ -31,7 +31,7 @@ private[scalikejdbc] trait OneToManies2Extractor[A, B1, B2, E <: WithExtractor,
private[scalikejdbc] def toTraversable(session: DBSession, sql: String, params: scala.collection.Seq[_], zExtractor: (A, scala.collection.Seq[B1], scala.collection.Seq[B2]) => Z): Traversable[Z] = {
val attributesSwitcher = createDBSessionAttributesSwitcher()
DBSessionWrapper(session, attributesSwitcher)
.foldLeft(statement, rawParameters: _*)(LinkedHashMap[A, (Seq[B1], scala.collection.Seq[B2])]())(processResultSet).map {
.foldLeft(statement, rawParameters.toSeq: _*)(LinkedHashMap[A, (Seq[B1], scala.collection.Seq[B2])]())(processResultSet).map {
case (one, (t1, t2)) => zExtractor(one, t1, t2)
}
}
Expand Down Expand Up @@ -110,8 +110,8 @@ final class OneToManies2SQLToCollection[A, B1, B2, E <: WithExtractor, Z] privat

import GeneralizedTypeConstraintsForWithExtractor._

override def apply[C[_]]()(implicit session: DBSession, context: ConnectionPoolContext = NoConnectionPoolContext, hasExtractor: ThisSQL =:= SQLWithExtractor, cbf: CanBuildFrom[Nothing, Z, C[Z]]): C[Z] = {
executeQuery(session, (session: DBSession) => toTraversable(session, statement, rawParameters, zExtractor).to[C])
override def apply[C[_]]()(implicit session: DBSession, context: ConnectionPoolContext = NoConnectionPoolContext, hasExtractor: ThisSQL =:= SQLWithExtractor, f: Factory[Z, C[Z]]): C[Z] = {
executeQuery(session, (session: DBSession) => f.fromSpecific(toTraversable(session, statement, rawParameters, zExtractor)))
}

private[scalikejdbc] def extractOne: WrappedResultSet => A = one
Expand Down