
Minor updates to Schemifier. Fixes #1038

Add internal logging and support for structureOnly changes
commit 73ba4869f9aa4f8a5282003e3fef145b80edaa0d 1 parent ef58e9d
Jeppe Nejsum Madsen authored June 20, 2011
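
This commit adds schemify overloads that take a structureOnly flag between performWrite and logFunc. A minimal usage sketch of the new overloads (User and Order are hypothetical MetaMapper instances, not part of this commit):

    // Bring tables and columns in sync but skip indexes and constraints,
    // e.g. when those are maintained outside Lift.
    Schemifier.schemify(true, true, Schemifier.neverF _, User, Order)

    // Dry run: performWrite = false only collects the DDL Lift would execute
    // and returns it as a List[String], without touching the database.
    val pendingDdl = Schemifier.schemify(false, true, Schemifier.neverF _, User, Order)

The pre-existing overloads (without structureOnly) now delegate with structureOnly = false, so their behaviour is unchanged.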
1  .gitignore
@@ -46,6 +46,7 @@ project/plugins/project
 #  ^\.pc/
 
 # IntelliJ
+*.eml
 *.iml
 *.ipr
 *.iws
4  persistence/mapper/src/main/scala/net/liftweb/mapper/FieldFinder.scala
@@ -45,12 +45,12 @@ class FieldFinder[T: ClassManifest](metaMapper: AnyRef, logger: net.liftweb.comm
         val fields = Map(c.getDeclaredFields.
                           filter{f =>
                             val ret = typeFilter(f.getType)
-                            logger.debug("typeFilter(" + f.getType + "); T=" + classManifest[T].erasure)
+                            logger.trace("typeFilter(" + f.getType + "); T=" + classManifest[T].erasure)
                             ret
                           }.
                           map(f => (deMod(f.getName), f)) :_*)
 
-        logger.debug("fields: " + fields)
+        logger.trace("fields: " + fields)
 
         // this method will find all the super classes and super-interfaces
         def getAllSupers(clz: Class[_]): List[Class[_]] = clz match {
83  persistence/mapper/src/main/scala/net/liftweb/mapper/Schemifier.scala
@@ -50,12 +50,21 @@ object Schemifier extends Loggable {
   def neverF(msg: => AnyRef) = {}
 
 
-  def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): List[String] = schemify(performWrite, logFunc, DefaultConnectionIdentifier, stables :_*)
-
-  case class Collector(funcs: List[() => Any], cmds: List[String]) {
+  def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): List[String] =
+    schemify(performWrite, logFunc, DefaultConnectionIdentifier, stables :_*)
+
+  def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, dbId: ConnectionIdentifier, stables: BaseMetaMapper*): List[String] =
+    schemify(performWrite, false, logFunc, dbId, stables :_*)
+
+  def schemify(performWrite: Boolean, structureOnly: Boolean, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): List[String] =
+    schemify(performWrite, structureOnly, logFunc, DefaultConnectionIdentifier, stables :_*)
+
+  private case class Collector(funcs: List[() => Any], cmds: List[String]) {
     def +(other: Collector) = Collector(funcs ::: other.funcs, cmds ::: other.cmds)
   }
 
+  private val EmptyCollector = new Collector(Nil, Nil)
+
   private def using[RetType <: Any, VarType <: ResultSet](f: => VarType)(f2: VarType => RetType): RetType = {
     val theVar = f
     try {
@@ -65,7 +74,19 @@
     }
   }
 
-  def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, dbId: ConnectionIdentifier, stables: BaseMetaMapper*): List[String] = {
+  /**
+   * Modify database specified in dbId so it matches the structure specified in the MetaMappers
+   *
+   * @param performWrite if false, will not write any changes to the database, only collect them
+   * @param structureOnly if true, will only check tables and columns, not indexes and constraints.
+   *    Useful if schema is maintained outside Lift, but still needs structure to be in sync
+   * @param logFunc A function that will be called for each statement being executed if performWrite == true
+   * @param dbId The ConnectionIdentifier to be used
+   * @param stables The MetaMapper instances to check
+   *
+   * @return The list of statements needed to bring the database in a consistent state. This list is created even if performWrite=false
+   */
+  def schemify(performWrite: Boolean, structureOnly: Boolean, logFunc: (=> AnyRef) => Unit, dbId: ConnectionIdentifier, stables: BaseMetaMapper*): List[String] = {
     val tables = stables.toList
     DB.use(dbId) { con =>
       // Some databases (Sybase) don't like doing transactional DDL, so we disable transactions
@@ -73,27 +94,41 @@
         con.connection.commit
         con.connection.setAutoCommit(true)
       }
+      logger.debug("Starting schemify. write=%s, structureOnly=%s, dbId=%s, schema=%s, tables=%s".format(performWrite, structureOnly, dbId, getDefaultSchemaName(con), tables.map(_.dbTableName)))
 
       val connection = con // SuperConnection(con)
       val driver = DriverType.calcDriver(connection)
       val actualTableNames = new HashMap[String, String]
-      if (performWrite) tables.foreach(_.beforeSchemifier)
+      if (performWrite) {
+        tables.foreach{t =>
+          logger.debug("Running beforeSchemifier on table %s".format(t.dbTableName))
+          t.beforeSchemifier
+        }
+      }
+
+      def tableCheck(t: BaseMetaMapper, desc: String, f: => Collector): Collector = {
+        actualTableNames.get(t._dbTableNameLC).map(x => f).getOrElse{
+          logger.warn("Skipping %s on table '%s' since it doesn't exist".format(desc, t.dbTableName))
+          EmptyCollector
+        }
+      }
+
       val toRun =
-      tables.foldLeft(Collector(Nil, Nil))((b, t) => b + ensureTable(performWrite, logFunc, t, connection, actualTableNames)) +
-      tables.foldLeft(Collector(Nil, Nil))((b, t) => b + ensureColumns(performWrite, logFunc, t, connection, actualTableNames)) +
-      tables.foldLeft(Collector(Nil, Nil))((b, t) => b + ensureIndexes(performWrite, logFunc, t, connection, actualTableNames)) +
-      tables.foldLeft(Collector(Nil, Nil))((b, t) => b + ensureConstraints(performWrite, logFunc, t, dbId, connection, actualTableNames))
-
-      /*
-       val toRun = tables.flatMap(t => ensureTable(performWrite, t, connection, actualTableNames) ) :::
-       tables.flatMap{t => ensureColumns(performWrite, t, connection, actualTableNames)} :::
-       tables.flatMap{t => ensureIndexes(performWrite, t, connection, actualTableNames)} :::
-       tables.flatMap{t => ensureConstraints(performWrite, t, connection, actualTableNames)}
-       */
+        tables.foldLeft(EmptyCollector)((b, t) => b + ensureTable(performWrite, logFunc, t, connection, actualTableNames)) +
+        tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureColumns", ensureColumns(performWrite, logFunc, t, connection, actualTableNames))) +
+        (if (structureOnly)
+          EmptyCollector
+        else
+          (tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureIndexes", ensureIndexes(performWrite, logFunc, t, connection, actualTableNames))) +
+           tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureConstraints", ensureConstraints(performWrite, logFunc, t, dbId, connection, actualTableNames)))))
 
       if (performWrite) {
-        tables.foreach(_.afterSchemifier)
-        toRun.funcs.foreach(f => f())
+        logger.debug("Executing DDL statements")
+        toRun.funcs.foreach(f => f())
+        tables.foreach{t =>
+          logger.debug("Running afterSchemifier on table %s".format(t.dbTableName))
+          t.afterSchemifier
+        }
       }
 
       toRun.cmds
@@ -166,6 +201,7 @@
    */
   private def maybeWrite(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, connection: SuperConnection) (makeSql: () => String) : String ={
     val ct = makeSql()
+    logger.trace("maybeWrite DDL: "+ct)
     if (performWrite) {
       logFunc(ct)
       val st = connection.createStatement
@@ -176,8 +212,9 @@
   }
 
   private def ensureTable(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, table: BaseMetaMapper, connection: SuperConnection, actualTableNames: HashMap[String, String]): Collector = {
-    val hasTable = hasTable_?(table, connection, actualTableNames)
+    val hasTable = logger.trace("Does table exist?: "+table.dbTableName, hasTable_?(table, connection, actualTableNames))
     val cmds = new ListBuffer[String]()
+
     if (!hasTable) {
       cmds += maybeWrite(performWrite, logFunc, connection) {
         () => "CREATE TABLE "+table._dbTableNameLC+" ("+createColumns(table, connection).mkString(" , ")+") "+connection.createTablePostpend
@@ -219,12 +256,14 @@
           if (tableName == table._dbTableNameLC.toLowerCase && field.dbColumnNames(field.name).map(_.toLowerCase).contains(columnName)) {
             cols = columnName :: cols
             hasColumn = hasColumn + 1
+            logger.trace("Column exists: %s.%s ".format(table.dbTableName, columnName))
+
           }
         })
-
       // FIXME deal with column types
-      (field.dbColumnNames(field.name).filter(f => !cols.map(_.toLowerCase).contains(f.toLowerCase))).foreach {
-        colName =>
+      (field.dbColumnNames(field.name).filter(f => !cols.map(_.toLowerCase).contains(f.toLowerCase))).foreach {colName =>
+        logger.trace("Column does not exist: %s.%s ".format(table.dbTableName, colName))
+
         cmds += maybeWrite(performWrite, logFunc, connection) {
           () => "ALTER TABLE "+table._dbTableNameLC+" "+connection.driverType.alterAddColumn+" "+field.fieldCreatorString(connection.driverType, colName)
         }
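
A note on the ensureTable change above: hasTable_? is now wrapped in a two-argument logger.trace(msg, value) call which, judging by its use here (hasTable is still tested as a Boolean), logs the message together with the value and returns the value, so the result can be traced without an extra temporary. Roughly, the idiom looks like this (stand-alone sketch; traceValue is a hypothetical helper, not the Lift API):

    // Log a message plus a computed value, then hand the value back unchanged.
    def traceValue[T](msg: => AnyRef, value: T): T = {
      println("TRACE " + msg + ": " + value)  // stand-in for logger.trace
      value
    }

    val hasTable = traceValue("Does table exist?: users", false /* e.g. hasTable_?(...) */)
    if (!hasTable) {
      // schedule CREATE TABLE ...
    }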
38  persistence/mapper/src/test/scala/net/liftweb/mapper/SchemifierSpec.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2011 WorldWide Conferencing, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.liftweb
+package mapper
+
+import org.specs.Specification
+
+import common._
+
+
+/**
+ * Systems under specification for Schemifier.
+ */
+object SchemifierSpec extends Specification("Schemifier Specification") {
+  val provider = DbProviders.H2MemoryProvider
+
+  "Schemifier" should {
+    "not crash in readonly if table doesn't exist" in {
+      provider.setupDB
+      Schemifier.schemify(false, Schemifier.neverF _, Thing)
+    }
+  }
+}
+
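
The spec exercises the read-only path: with performWrite = false, schemify only collects the statements it would need and returns them. A sketch of how the same test could surface those statements (using the Thing mapper and H2 provider from the spec above; the println is purely illustrative):

    // Nothing is written; the returned list holds the DDL Lift would run.
    provider.setupDB
    val pending: List[String] = Schemifier.schemify(false, Schemifier.neverF _, Thing)
    pending.foreach(stmt => println("pending DDL: " + stmt))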
