Permalink
Browse files

SQL Server adapter gets some love (closes #4298) [rtomayko@gmail.com]

git-svn-id: http://svn-commit.rubyonrails.org/rails/trunk@3949 5ecf4fe2-1ee6-0310-87b1-e25e094e27de
  • Loading branch information...
1 parent c0ad3b6 commit 9db8f3e57a0b18340b151033c53caaa4c679a801 @dhh dhh committed Mar 18, 2006
View
@@ -1,5 +1,7 @@
*SVN*
+* SQL Server adapter gets some love #4298 [rtomayko@gmail.com]
+
* Added OpenBase database adapter that builds on top of the http://www.spice-of-life.net/ruby-openbase/ driver. All functionality except LIMIT/OFFSET is supported #3528 [derrickspell@cdmplus.com]
* Rework table aliasing to account for truncated table aliases. Add smarter table aliasing when doing eager loading of STI associations. This allows you to use the association name in the order/where clause. [Jonathan Viney / Rick Olson] #4108 Example (SpecialComment is using STI):
@@ -27,6 +27,7 @@ def self.sqlserver_connection(config) #:nodoc:
mode = config[:mode] ? config[:mode].to_s.upcase : 'ADO'
username = config[:username] ? config[:username].to_s : 'sa'
password = config[:password] ? config[:password].to_s : ''
+ autocommit = config.key?(:autocommit) ? config[:autocommit] : true
if mode == "ODBC"
raise ArgumentError, "Missing DSN. Argument ':dsn' must be set in order for this adapter to work." unless config.has_key?(:dsn)
dsn = config[:dsn]
@@ -38,8 +39,7 @@ def self.sqlserver_connection(config) #:nodoc:
driver_url = "DBI:ADO:Provider=SQLOLEDB;Data Source=#{host};Initial Catalog=#{database};User Id=#{username};Password=#{password};"
end
conn = DBI.connect(driver_url, username, password)
-
- conn["AutoCommit"] = true
+ conn["AutoCommit"] = autocommit
ConnectionAdapters::SQLServerAdapter.new(conn, logger, [driver_url, username, password])
end
end # class Base
@@ -48,8 +48,8 @@ module ConnectionAdapters
class ColumnWithIdentity < Column# :nodoc:
attr_reader :identity, :is_special, :scale
- def initialize(name, default, sql_type = nil, is_identity = false, scale_value = 0)
- super(name, default, sql_type)
+ def initialize(name, default, sql_type = nil, is_identity = false, null = true, scale_value = 0)
+ super(name, default, sql_type, null)
@identity = is_identity
@is_special = sql_type =~ /text|ntext|image/i ? true : false
@scale = scale_value
@@ -243,14 +243,20 @@ def columns(table_name, name = nil)
return [] if table_name.blank?
table_name = table_name.to_s if table_name.is_a?(Symbol)
table_name = table_name.split('.')[-1] unless table_name.nil?
- sql = "SELECT COLUMN_NAME as ColName, COLUMN_DEFAULT as DefaultValue, DATA_TYPE as ColType, COL_LENGTH('#{table_name}', COLUMN_NAME) as Length, COLUMNPROPERTY(OBJECT_ID('#{table_name}'), COLUMN_NAME, 'IsIdentity') as IsIdentity, NUMERIC_SCALE as Scale FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '#{table_name}'"
+ sql = "SELECT COLUMN_NAME as ColName, COLUMN_DEFAULT as DefaultValue, DATA_TYPE as ColType, IS_NULLABLE As IsNullable, COL_LENGTH('#{table_name}', COLUMN_NAME) as Length, COLUMNPROPERTY(OBJECT_ID('#{table_name}'), COLUMN_NAME, 'IsIdentity') as IsIdentity, NUMERIC_SCALE as Scale FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '#{table_name}'"
# Comment out if you want to have the Columns select statement logged.
- # Personnally, I think it adds unneccessary bloat to the log.
+ # Personally, I think it adds unnecessary bloat to the log.
# If you do comment it out, make sure to un-comment the "result" line that follows
result = log(sql, name) { @connection.select_all(sql) }
#result = @connection.select_all(sql)
columns = []
- result.each { |field| columns << ColumnWithIdentity.new(field[:ColName], field[:DefaultValue].to_s.gsub!(/[()\']/,"") =~ /null/ ? nil : field[:DefaultValue], "#{field[:ColType]}(#{field[:Length]})", field[:IsIdentity] == 1 ? true : false, field[:Scale]) }
+ result.each do |field|
+ default = field[:DefaultValue].to_s.gsub!(/[()\']/,"") =~ /null/ ? nil : field[:DefaultValue]
+ type = "#{field[:ColType]}(#{field[:Length]})"
+ is_identity = field[:IsIdentity] == 1
+ is_nullable = field[:IsNullable] == 'YES'
+ columns << ColumnWithIdentity.new(field[:ColName], default, type, is_identity, is_nullable, field[:Scale])
+ end
columns
end
@@ -466,7 +472,7 @@ def remove_index(table_name, options = {})
def type_to_sql(type, limit = nil) #:nodoc:
native = native_database_types[type]
# if there's no :limit in the default type definition, assume that type doesn't support limits
- limit = native[:limit] ? limit || native[:limit] : nil
+ limit = limit || native[:limit]
column_type_sql = native[:name]
column_type_sql << "(#{limit})" if limit
column_type_sql
@@ -524,11 +530,13 @@ def query_contains_identity_column(sql, col)
end
# Inverts the direction of every comma-separated ordering fragment, e.g.
#   "salary ASC, id DESC"  =>  "salary DESC, id ASC"
# Used to flip an ORDER BY clause when emulating OFFSET by selecting from a
# reversed result set. A fragment with no explicit direction defaults to
# ascending order, so it is inverted by appending " DESC".
def change_order_direction(order)
  order.split(",").collect {|fragment|
    case fragment
    when /\bDESC\b/i then fragment.gsub(/\bDESC\b/i, "ASC")
    when /\bASC\b/i  then fragment.gsub(/\bASC\b/i, "DESC")
    # Fragments are already comma-free after the outer split, so the old
    # split(',').join(' DESC,') round-trip was a no-op; append directly.
    else fragment + ' DESC'
    end
  }.join(",")
end
def get_special_columns(table_name)
@@ -745,6 +745,18 @@ def test_boolean
b_true = Booleantest.find(true_id)
assert b_true.value?
end
+
# Verifies that the string values "false"/"true" are cast to booleans on
# save and round-trip correctly through a reload from the database.
def test_boolean_cast_from_string
  false_record = Booleantest.create({ "value" => "false" })
  true_record = Booleantest.create({ "value" => "true" })

  assert !Booleantest.find(false_record.id).value?
  assert Booleantest.find(true_record.id).value?
end
def test_clone
topic = Topic.find(1)
@@ -1163,6 +1175,7 @@ def test_to_xml
xml = topics(:first).to_xml(:indent => 0, :skip_instruct => true)
bonus_time_in_current_timezone = topics(:first).bonus_time.xmlschema
written_on_in_current_timezone = topics(:first).written_on.xmlschema
+ last_read_in_current_timezone = topics(:first).last_read.xmlschema
assert_equal "<topic>", xml.first(7)
assert xml.include?(%(<title>The First Topic</title>))
assert xml.include?(%(<author-name>David</author-name>))
@@ -1172,8 +1185,8 @@ def test_to_xml
assert xml.include?(%(<content>Have a nice day</content>))
assert xml.include?(%(<author-email-address>david@loudthinking.com</author-email-address>))
assert xml.include?(%(<parent-id></parent-id>))
- if current_adapter?(:SybaseAdapter)
- assert xml.include?(%(<last-read type="datetime">2004-04-15T00:00:00-05:00</last-read>))
+ if current_adapter?(:SybaseAdapter) or current_adapter?(:SQLServerAdapter)
+ assert xml.include?(%(<last-read type="datetime">#{last_read_in_current_timezone}</last-read>))
else
assert xml.include?(%(<last-read type="date">2004-04-15</last-read>))
end
@@ -58,6 +58,11 @@ def test_find_all_with_prepared_limit_and_offset
assert_equal(entrants(:third).name, entrants.first.name)
end
# Ensures :limit/:offset work together with a multi-column :order clause
# (exercises the adapter's ORDER BY inversion for OFFSET emulation).
def test_find_all_with_limit_and_offset_and_multiple_orderings
  ordered = Developer.find(:all, :order => "salary ASC, id DESC", :limit => 3, :offset => 1)
  assert_equal ["David", "fixture_10", "fixture_9"], ordered.map {|dev| dev.name}
end
+
def test_find_with_limit_and_condition
developers = Developer.find(:all, :order => "id DESC", :conditions => "salary = 100000", :limit => 3, :offset =>7)
assert_equal(1, developers.size)
@@ -28,7 +28,7 @@ CREATE TABLE topics (
bonus_time datetime default NULL,
last_read datetime default NULL,
content varchar(255) default NULL,
- approved tinyint default 1,
+ approved bit default 1,
replies_count int default 0,
parent_id int default NULL,
type varchar(50) default NULL
@@ -14,6 +14,15 @@ def test_schema_dump
assert_match %r{create_table "authors"}, output
assert_no_match %r{create_table "schema_info"}, output
end
+
# The dumped schema must carry :null => false for NOT NULL columns
# (only tables starting with "s" are dumped, via the ignore pattern).
def test_schema_dump_includes_not_null_columns
  stream = StringIO.new

  ActiveRecord::SchemaDumper.ignore_tables = [/^[^s]/]
  ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, stream)
  assert_match %r{:null => false}, stream.string
end
def test_schema_dump_with_string_ignored_table
stream = StringIO.new

0 comments on commit 9db8f3e

Please sign in to comment.