/
package.scala
227 lines (220 loc) · 10.4 KB
/
package.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
package scala.slick.jdbc
import slick.util.SlickLogger
import org.slf4j.LoggerFactory
import scala.reflect.ClassTag
import scala.slick.SlickException
/** A wrapper around jdbc's meta data api and logic to create a Slick model from it. */
package object meta{
import scala.slick.driver.JdbcProfile
import scala.slick.jdbc.JdbcBackend
import scala.slick.ast.ColumnOption
/**
 * Creates a Slick data model from jdbc meta data.
 * Foreign keys pointing out of the given tables are not included.
 * @param mTables tables to include in the model
 * @param profile JdbcProfile that was used to retrieve mTables (using a different one can lead to exceptions)
 * @param session session the database meta data is read through
 * @return a Slick model describing the given tables (sorted by table name for deterministic output)
 */
def createModel(mTables: Seq[MTable], profile: JdbcProfile)(implicit session: JdbcBackend#Session) : slick.model.Model = {
  lazy val logger = new SlickLogger(LoggerFactory.getLogger("scala.slick.jdbc.meta"))
  import java.sql.DatabaseMetaData
  import scala.slick.{model => m}
  import collection.immutable.ListMap
  // Index the jdbc meta data by qualified table name for the lookups below.
  lazy val mTablesByMQName: Map[MQName,MTable] = mTables.map(t => t.name -> t).toMap
  // Primary key columns per table, ordered by their position within the key.
  lazy val mPrimaryKeysByMQName: Map[MQName,Seq[MPrimaryKey]] = mTables.map(t => t.name -> t.getPrimaryKeys.list.sortBy(_.keySeq)).toMap
  // Map jdbc table names to model names, dropping well-known default schema names.
  val tableNameByMQName = mTables.map(_.name).map( name =>
    name -> m.QualifiedName(
      name.name,
      schema=name.schema
      // TODO: move these out into the drivers
      .filter(_ != "PUBLIC") // H2 / Hsqldb default
      .filter(_ != "public") // Postgres default
      .filter(_ != "APP"), // Derby default
      name.catalog
    )
  ).toMap
  // All model columns, grouped by table and indexed by column name (ListMap preserves column order).
  val columnsByTableAndName: Map[MQName,Map[String,m.Column]] = {
    // Converts a single jdbc column description into a model column.
    def column(tableName: m.QualifiedName, column: MColumn) = {
      val mPrimaryKeys = mPrimaryKeysByMQName(column.table)
      // Matches a single-quoted default value, e.g. 'foo'.
      // (Removed unused IntValue/DoubleValue extractors; DoubleValue's char class was also malformed: "[0-9*]".)
      val StringValue = """^'(.+)'$""".r
      import ColumnOption._
      val tpe = jdbcTypeToScala(column.sqlType).toString match {
        case "Object" => "AnyRef"
        case "java.lang.String" => "String"
        case t => t
      }
      val nullable = column.nullable.getOrElse(true)
      val autoInc = column.isAutoInc.getOrElse(false)
      val c = m.Column(
        name=column.name,
        table=tableName,
        tpe=tpe,
        nullable=nullable,
        // omitting the DBType as it is not portable between backends
        options = Set() ++
          (if(autoInc) Some(AutoInc) else None) ++
          // Parse the column default value; auto-increment columns never get a Default option.
          (column.columnDef.filter(_ => !autoInc).flatMap( v =>
            if(v=="NULL"){
              Some(None)
            } else {
              val mapped = try{
                Some((v,tpe) match {
                  // NOTE: When extending this list, please also extend the code generator accordingly
                  case (_,"Int") => v.toInt
                  case (_,"Long") => v.toLong
                  case (_,"Short") => v.toShort // seen in Derby
                  case (_,"Float") => v.toFloat
                  case (_,"Double") => v.toDouble
                  case (StringValue(str),"String") => str
                  //case (_,"String") => v // seen in MySQL // buggy in postgres, found value 'unchecked'::character varying
                  case ("1","Boolean") => true // seen in MySQL
                  case ("0","Boolean") => false
                  case ("true","Boolean") => true // seen in postgres
                  case ("false","Boolean") => false
                  case ("TRUE","Boolean") => true // seen in H2
                  case ("FALSE","Boolean") => false
                  case ("CURRENT_TIMESTAMP","java.sql.Timestamp") => throw new SlickException(s"Ignoring default value CURRENT_TIMESTAMP of column $tableName.${column.name} of type $tpe")
                  case _ => throw new SlickException(s"Could not parse default value $v of column $tableName.${column.name} of type $tpe")
                })
              } catch {
                // Unparsable defaults are dropped (logged at debug level), not fatal.
                case e: java.lang.NumberFormatException => logger.debug(s"NumberFormatException: Could not parse default value $v of column $tableName.${column.name} as $tpe"); None
                case e: SlickException => logger.debug(e.getMessage); None
              }
              if(nullable)
                Some(mapped)
              else mapped
            }
          ).map(Default.apply)) ++
          // Add ColumnOption if single column primary key
          (if(mPrimaryKeys.size == 1) mPrimaryKeys.filter(_.column == column.name).map(_ => PrimaryKey) else Set())
      )
      c
    }
    mTablesByMQName.mapValues( t => ListMap(t.getColumns.list.sortBy(_.ordinalPosition).map(c => c.name -> column(tableNameByMQName(t.name),c)):_*))
  }
  // Converts a jdbc table description into a model table.
  def table(mTable: MTable) = {
    val tableName = tableNameByMQName(mTable.name)
    val columns = columnsByTableAndName(mTable.name).values.toSeq
    val columnsByName: Map[String,m.Column] = columns.map(c => c.name -> c).toMap
    // Builds a compound primary key; single-column keys are modeled as a column option instead.
    def primaryKey(mPrimaryKeys:Seq[MPrimaryKey]) = {
      // single column primary keys excluded in favor of PrimaryKey column option
      if(mPrimaryKeys.size <= 1) None else Some(
        m.PrimaryKey(
          mPrimaryKeys.head.pkName.filter(_ != "")/*MySQL workaround:*/.filter(_ != "PRIMARY"),
          tableName,
          mPrimaryKeys.map(_.column).map(columnsByName)
        )
      )
    }
    // Builds this table's foreign keys that point to tables included in the model.
    def foreignKeys(mForeignKeys:Seq[MForeignKey]) = {
      mForeignKeys
        // remove foreign keys pointing to tables which were not included
        .filter(fk => mTablesByMQName.isDefinedAt(fk.pkTable))
        .groupBy(fk => (fk.pkTable,fk.fkName,fk.pkName,fk.fkTable))
        .toSeq
        .sortBy{case (key,_) => (key._1.name,key._2,key._3,key._4.name)}
        .map(_._2.sortBy(_.keySeq)) // respect order
        .map{ fks =>
          val fk = fks.head
          assert(tableName == tableNameByMQName(fk.fkTable))
          val fkColumns = fks.map(_.fkColumn).map(columnsByName)
          val pkColumns = fks.map(_.pkColumn).map(columnsByTableAndName(fk.pkTable))
          assert(fkColumns.size == pkColumns.size)
          m.ForeignKey(
            fk.fkName.filter(_ != ""),
            tableName,
            fkColumns,
            tableNameByMQName(fk.pkTable),
            pkColumns,
            fk.updateRule,
            fk.deleteRule
          )
        }
    }
    // Reads the table's indices; returns Seq() if the driver cannot provide them.
    def indices(mTable: MTable) = {
      try{
        mTable.getIndexInfo().list
          // filter out unnecessary tableIndexStatistic (we can safely call .get later)
          .filter(_.indexType != DatabaseMetaData.tableIndexStatistic)
          .groupBy(_.indexName)
          .toSeq
          .sortBy(_._1)
          .map(_._2.sortBy(_.ordinalPosition)) // respect order
          .map{ mIndices =>
            val idx = mIndices.head
            val cols = mIndices.map(
              _.column.get.stripPrefix("\"").stripSuffix("\"") // strip " to work around postgres issue
            )
            if(!cols.forall(columnsByName.isDefinedAt)){
              // postgres may refer to column oid, skipping index for now. Maybe we should generate a column and include it instead.
              // FIX: log the columns that are actually missing (set difference), not the ones that exist (previously used `intersect`).
              logger.debug(s"Skipping index ${idx.indexName} of table ${mTable.name.name} because it referred to undefined columns: "+(cols.toSet -- columnsByName.keys.toSet))
              None
            } else
              Some(m.Index(
                idx.indexName.filter(_ != ""),
                tableName,
                cols.map(columnsByName),
                !idx.nonUnique
              ))
          }.flatten
      } catch {
        case e:java.sql.SQLException =>
          logger.debug(s"Skipping indices of table ${mTable.name.name} due to exception during getIndexInfo: "+e.getMessage.trim)
          Seq()
      }
    }
    val mPrimaryKeys = mPrimaryKeysByMQName(mTable.name)
    val fks = foreignKeys(mTable.getImportedKeys.list)
    m.Table(
      tableName,
      columns,
      primaryKey(mPrimaryKeys),
      fks,
      // indices not including primary key and table statistics
      indices(mTable)
        .filter{
          // filter out foreign key index
          case idx if !idx.unique => !fks.exists(_.referencingColumns.toSet == idx.columns.toSet)
          // filter out primary key index
          case idx => mPrimaryKeys.isEmpty || mPrimaryKeys.map(_.column).toSeq != idx.columns.map(_.name).toSeq
        }
    )
  }
  // Sort tables by name so generated models are deterministic.
  m.Model( mTables.sortBy(_.name.name).map(table) )
}
/** Converts from java.sql.Types to the corresponding Java class name (with fully qualified path). */
def jdbcTypeToScala(jdbcType: Int): ClassTag[_] = {
  lazy val logger = new SlickLogger(LoggerFactory.getLogger("scala.slick.jdbc.meta"))
  import java.sql.Types._
  import scala.reflect.classTag
  // Known mappings per TABLE B-1 of JSR-221 (JDBC API Specification 4.1 Maintenance Release),
  // mapped to the corresponding Scala types where applicable.
  // Types without a mapping here (ARRAY, STRUCT, REF, DATALINK, ROWID, NCLOB, SQLXML, ...)
  // fall through to the warning default below.
  val knownMappings: Map[Int, ClassTag[_]] =
    Seq(CHAR, VARCHAR, LONGVARCHAR, NCHAR, NVARCHAR, LONGNVARCHAR).map(_ -> classTag[String]).toMap ++
    Seq(NUMERIC, DECIMAL).map(_ -> classTag[BigDecimal]) ++
    Seq(BIT, BOOLEAN).map(_ -> classTag[Boolean]) ++
    Seq(FLOAT, DOUBLE).map(_ -> classTag[Double]) ++
    Seq(BINARY, VARBINARY, LONGVARBINARY, BLOB).map(_ -> classTag[java.sql.Blob]) ++
    Map(
      TINYINT   -> classTag[Byte],
      SMALLINT  -> classTag[Short],
      INTEGER   -> classTag[Int],
      BIGINT    -> classTag[Long],
      REAL      -> classTag[Float],
      DATE      -> classTag[java.sql.Date],
      TIME      -> classTag[java.sql.Time],
      TIMESTAMP -> classTag[java.sql.Timestamp],
      CLOB      -> classTag[java.sql.Clob],
      NULL      -> classTag[Null]
    )
  knownMappings.get(jdbcType) match {
    case Some(tag) => tag
    case None if jdbcType == DISTINCT =>
      logger.warn(s"Found jdbc type DISTINCT. Assuming Blob. This may be wrong."); classTag[java.sql.Blob] // FIXME
    case None =>
      logger.warn(s"Found unknown jdbc type $jdbcType. Assuming String. This may be wrong."); classTag[String] // FIXME
  }
}
}