
Added Time parsing exception handling.

Began refactoring the exceptions.
1 parent 7a29108 commit 275463c99f0c89787643ca91f3b9f999a2b6465e @wayneeseguin committed Dec 19, 2007
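
A minimal sketch of the headline change, assuming this revision of Sequel is on the load path: errors that were previously raised as the flat SequelError class now live under a Sequel::Error hierarchy (see lib/sequel/exceptions.rb below), so rescuing the parent class still catches the more specific subclasses.

  require "sequel"

  begin
    # Sequel::Error::InvalidParameters is one of the new namespaced subclasses
    raise Sequel::Error::InvalidParameters
  rescue Sequel::Error => e
    puts "caught #{e.class}"   # => caught Sequel::Error::InvalidParameters
  end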
12 lib/sequel.rb
@@ -1,12 +1,12 @@
-require 'metaid'
-require 'bigdecimal'
-require 'bigdecimal/util'
+require "metaid"
+require "bigdecimal"
+require "bigdecimal/util"
files = %w[
- core_ext core_sql array_keys error connection_pool pretty_table
+ core_ext core_sql array_keys exceptions connection_pool pretty_table
dataset migration model schema database worker
]
-dir = File.join(File.dirname(__FILE__), 'sequel')
+dir = File.join(File.dirname(__FILE__), "sequel")
files.each {|f| require(File.join(dir, f))}
module Sequel #:nodoc:
@@ -41,7 +41,7 @@ def method_missing(m, *args)
opts = args[1].merge(:database => args[0])
end
rescue
- raise SequelError, "Invalid parameters specified"
+ raise Sequel::Error::InvalidParameters
end
c.new(opts)
end
2 lib/sequel/adapters/db2.rb
@@ -16,7 +16,7 @@ def check_error(rc, msg)
case rc
when SQL_SUCCESS, SQL_SUCCESS_WITH_INFO: nil
else
- raise SequelError, msg
+ raise Sequel::Error, msg
end
end
2 lib/sequel/adapters/jdbc.rb
@@ -15,7 +15,7 @@ class Database < Sequel::Database
def connect
unless conn_string = @opts[:uri] || @opts[:url] || @opts[:database]
- raise SequelError, "No connection string specified"
+ raise Sequel::Error::NoConnectionString
end
unless conn_string =~ /^jdbc:/
conn_string = "jdbc:#{conn_string}"
528 lib/sequel/adapters/mysql.rb
@@ -1,265 +1,265 @@
-require 'mysql'
-
-# Monkey patch Mysql::Result to yield hashes with symbol keys
-class Mysql::Result
- MYSQL_TYPES = {
- 0 => :to_d, # MYSQL_TYPE_DECIMAL
- 1 => :to_i, # MYSQL_TYPE_TINY
- 2 => :to_i, # MYSQL_TYPE_SHORT
- 3 => :to_i, # MYSQL_TYPE_LONG
- 4 => :to_f, # MYSQL_TYPE_FLOAT
- 5 => :to_f, # MYSQL_TYPE_DOUBLE
- # 6 => ??, # MYSQL_TYPE_NULL
- 7 => :to_time, # MYSQL_TYPE_TIMESTAMP
- 8 => :to_i, # MYSQL_TYPE_LONGLONG
- 9 => :to_i, # MYSQL_TYPE_INT24
- 10 => :to_time, # MYSQL_TYPE_DATE
- 11 => :to_time, # MYSQL_TYPE_TIME
- 12 => :to_time, # MYSQL_TYPE_DATETIME
- 13 => :to_i, # MYSQL_TYPE_YEAR
- 14 => :to_time, # MYSQL_TYPE_NEWDATE
- # 15 => :to_s # MYSQL_TYPE_VARCHAR
- # 16 => :to_s, # MYSQL_TYPE_BIT
- 246 => :to_d, # MYSQL_TYPE_NEWDECIMAL
- 247 => :to_i, # MYSQL_TYPE_ENUM
- 248 => :to_i # MYSQL_TYPE_SET
- # 249 => :to_s, # MYSQL_TYPE_TINY_BLOB
- # 250 => :to_s, # MYSQL_TYPE_MEDIUM_BLOB
- # 251 => :to_s, # MYSQL_TYPE_LONG_BLOB
- # 252 => :to_s, # MYSQL_TYPE_BLOB
- # 253 => :to_s, # MYSQL_TYPE_VAR_STRING
- # 254 => :to_s, # MYSQL_TYPE_STRING
- # 255 => :to_s # MYSQL_TYPE_GEOMETRY
- }
-
- def convert_type(v, type)
- v ? ((t = MYSQL_TYPES[type]) ? v.send(t) : v) : nil
- end
-
- def columns(with_table = nil)
- unless @columns
- @column_types = []
- @columns = fetch_fields.map do |f|
- @column_types << f.type
- (with_table ? (f.table + "." + f.name) : f.name).to_sym
- end
- end
- @columns
- end
-
- def each_array(with_table = nil)
- c = columns
- while row = fetch_row
- c.each_with_index do |f, i|
- if (t = MYSQL_TYPES[@column_types[i]]) && (v = row[i])
- row[i] = v.send(t)
- end
- end
- row.keys = c
- yield row
- end
- end
-
- def each_hash(with_table = nil)
- c = columns
- while row = fetch_row
- h = {}
- c.each_with_index {|f, i| h[f] = convert_type(row[i], @column_types[i])}
- yield h
- end
- end
-end
-
-module Sequel
- module MySQL
- class Database < Sequel::Database
- set_adapter_scheme :mysql
-
- def serial_primary_key_options
- {:primary_key => true, :type => :integer, :auto_increment => true}
- end
-
- AUTO_INCREMENT = 'AUTO_INCREMENT'.freeze
-
- def auto_increment_sql
- AUTO_INCREMENT
- end
-
- def connect
- conn = Mysql.real_connect(@opts[:host], @opts[:user], @opts[:password],
- @opts[:database], @opts[:port], nil, Mysql::CLIENT_MULTI_RESULTS)
- conn.query_with_result = false
- if encoding = @opts[:encoding] || @opts[:charset]
- conn.query("set character_set_connection = '#{encoding}'")
- conn.query("set character_set_client = '#{encoding}'")
- conn.query("set character_set_results = '#{encoding}'")
- end
- conn.reconnect = true
- conn
- end
-
- def disconnect
- @pool.disconnect {|c| c.close}
- end
-
- def tables
- @pool.hold do |conn|
- conn.list_tables.map {|t| t.to_sym}
- end
- end
-
- def dataset(opts = nil)
- MySQL::Dataset.new(self, opts)
- end
-
- def execute(sql)
- @logger.info(sql) if @logger
- @pool.hold do |conn|
- conn.query(sql)
- end
- end
-
- def execute_select(sql)
- @logger.info(sql) if @logger
- @pool.hold do |conn|
- conn.query(sql)
- conn.use_result
- end
- end
-
- def execute_insert(sql)
- @logger.info(sql) if @logger
- @pool.hold do |conn|
- conn.query(sql)
- conn.insert_id
- end
- end
-
- def execute_affected(sql)
- @logger.info(sql) if @logger
- @pool.hold do |conn|
- conn.query(sql)
- conn.affected_rows
- end
- end
-
- def alter_table_sql(table, op)
- case op[:op]
- when :rename_column
- "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:new_name])} #{op[:type]}"
- when :set_column_type
- "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:name])} #{op[:type]}"
- when :drop_index
- "DROP INDEX #{default_index_name(table, op[:columns])} ON #{table}"
- else
- super(table, op)
- end
- end
-
- def transaction
- @pool.hold do |conn|
- @transactions ||= []
- if @transactions.include? Thread.current
- return yield(conn)
- end
- conn.query(SQL_BEGIN)
- begin
- @transactions << Thread.current
- result = yield(conn)
- conn.query(SQL_COMMIT)
- result
- rescue => e
- conn.query(SQL_ROLLBACK)
- raise e unless SequelRollbackError === e
- ensure
- @transactions.delete(Thread.current)
- end
- end
- end
- end
-
- class Dataset < Sequel::Dataset
- def quote_column_ref(c); "`#{c}`"; end
-
- TRUE = '1'
- FALSE = '0'
-
- def literal(v)
- case v
- when LiteralString: v
- when String: "'#{v.gsub(/'|\\/, '\&\&')}'"
- when true: TRUE
- when false: FALSE
- else
- super
- end
- end
-
- def match_expr(l, r)
- case r
- when Regexp:
- r.casefold? ? \
- "(#{literal(l)} REGEXP #{literal(r.source)})" :
- "(#{literal(l)} REGEXP BINARY #{literal(r.source)})"
- else
- super
- end
- end
-
- # MySQL supports ORDER and LIMIT clauses in UPDATE statements.
- def update_sql(values, opts = nil)
- sql = super
-
- opts = opts ? @opts.merge(opts) : @opts
-
- if order = opts[:order]
- sql << " ORDER BY #{column_list(order)}"
- end
-
- if limit = opts[:limit]
- sql << " LIMIT #{limit}"
- end
-
- sql
- end
-
- def insert(*values)
- @db.execute_insert(insert_sql(*values))
- end
-
- def update(*args, &block)
- @db.execute_affected(update_sql(*args, &block))
- end
-
- def delete(opts = nil)
- @db.execute_affected(delete_sql(opts))
- end
-
- def fetch_rows(sql)
- @db.synchronize do
- r = @db.execute_select(sql)
- begin
- @columns = r.columns
- r.each_hash {|row| yield row}
- ensure
- r.free
- end
- end
- self
- end
-
- def array_tuples_fetch_rows(sql, &block)
- @db.synchronize do
- r = @db.execute_select(sql)
- begin
- @columns = r.columns
- r.each_array(&block)
- ensure
- r.free
- end
- end
- self
- end
- end
- end
+require 'mysql'
+
+# Monkey patch Mysql::Result to yield hashes with symbol keys
+class Mysql::Result
+ MYSQL_TYPES = {
+ 0 => :to_d, # MYSQL_TYPE_DECIMAL
+ 1 => :to_i, # MYSQL_TYPE_TINY
+ 2 => :to_i, # MYSQL_TYPE_SHORT
+ 3 => :to_i, # MYSQL_TYPE_LONG
+ 4 => :to_f, # MYSQL_TYPE_FLOAT
+ 5 => :to_f, # MYSQL_TYPE_DOUBLE
+ # 6 => ??, # MYSQL_TYPE_NULL
+ 7 => :to_time, # MYSQL_TYPE_TIMESTAMP
+ 8 => :to_i, # MYSQL_TYPE_LONGLONG
+ 9 => :to_i, # MYSQL_TYPE_INT24
+ 10 => :to_time, # MYSQL_TYPE_DATE
+ 11 => :to_time, # MYSQL_TYPE_TIME
+ 12 => :to_time, # MYSQL_TYPE_DATETIME
+ 13 => :to_i, # MYSQL_TYPE_YEAR
+ 14 => :to_time, # MYSQL_TYPE_NEWDATE
+ # 15 => :to_s # MYSQL_TYPE_VARCHAR
+ # 16 => :to_s, # MYSQL_TYPE_BIT
+ 246 => :to_d, # MYSQL_TYPE_NEWDECIMAL
+ 247 => :to_i, # MYSQL_TYPE_ENUM
+ 248 => :to_i # MYSQL_TYPE_SET
+ # 249 => :to_s, # MYSQL_TYPE_TINY_BLOB
+ # 250 => :to_s, # MYSQL_TYPE_MEDIUM_BLOB
+ # 251 => :to_s, # MYSQL_TYPE_LONG_BLOB
+ # 252 => :to_s, # MYSQL_TYPE_BLOB
+ # 253 => :to_s, # MYSQL_TYPE_VAR_STRING
+ # 254 => :to_s, # MYSQL_TYPE_STRING
+ # 255 => :to_s # MYSQL_TYPE_GEOMETRY
+ }
+
+ def convert_type(v, type)
+ v ? ((t = MYSQL_TYPES[type]) ? v.send(t) : v) : nil
+ end
+
+ def columns(with_table = nil)
+ unless @columns
+ @column_types = []
+ @columns = fetch_fields.map do |f|
+ @column_types << f.type
+ (with_table ? (f.table + "." + f.name) : f.name).to_sym
+ end
+ end
+ @columns
+ end
+
+ def each_array(with_table = nil)
+ c = columns
+ while row = fetch_row
+ c.each_with_index do |f, i|
+ if (t = MYSQL_TYPES[@column_types[i]]) && (v = row[i])
+ row[i] = v.send(t)
+ end
+ end
+ row.keys = c
+ yield row
+ end
+ end
+
+ def each_hash(with_table = nil)
+ c = columns
+ while row = fetch_row
+ h = {}
+ c.each_with_index {|f, i| h[f] = convert_type(row[i], @column_types[i])}
+ yield h
+ end
+ end
+end
+
+module Sequel
+ module MySQL
+ class Database < Sequel::Database
+ set_adapter_scheme :mysql
+
+ def serial_primary_key_options
+ {:primary_key => true, :type => :integer, :auto_increment => true}
+ end
+
+ AUTO_INCREMENT = 'AUTO_INCREMENT'.freeze
+
+ def auto_increment_sql
+ AUTO_INCREMENT
+ end
+
+ def connect
+ conn = Mysql.real_connect(@opts[:host], @opts[:user], @opts[:password],
+ @opts[:database], @opts[:port], nil, Mysql::CLIENT_MULTI_RESULTS)
+ conn.query_with_result = false
+ if encoding = @opts[:encoding] || @opts[:charset]
+ conn.query("set character_set_connection = '#{encoding}'")
+ conn.query("set character_set_client = '#{encoding}'")
+ conn.query("set character_set_results = '#{encoding}'")
+ end
+ conn.reconnect = true
+ conn
+ end
+
+ def disconnect
+ @pool.disconnect {|c| c.close}
+ end
+
+ def tables
+ @pool.hold do |conn|
+ conn.list_tables.map {|t| t.to_sym}
+ end
+ end
+
+ def dataset(opts = nil)
+ MySQL::Dataset.new(self, opts)
+ end
+
+ def execute(sql)
+ @logger.info(sql) if @logger
+ @pool.hold do |conn|
+ conn.query(sql)
+ end
+ end
+
+ def execute_select(sql)
+ @logger.info(sql) if @logger
+ @pool.hold do |conn|
+ conn.query(sql)
+ conn.use_result
+ end
+ end
+
+ def execute_insert(sql)
+ @logger.info(sql) if @logger
+ @pool.hold do |conn|
+ conn.query(sql)
+ conn.insert_id
+ end
+ end
+
+ def execute_affected(sql)
+ @logger.info(sql) if @logger
+ @pool.hold do |conn|
+ conn.query(sql)
+ conn.affected_rows
+ end
+ end
+
+ def alter_table_sql(table, op)
+ case op[:op]
+ when :rename_column
+ "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:new_name])} #{op[:type]}"
+ when :set_column_type
+ "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:name])} #{op[:type]}"
+ when :drop_index
+ "DROP INDEX #{default_index_name(table, op[:columns])} ON #{table}"
+ else
+ super(table, op)
+ end
+ end
+
+ def transaction
+ @pool.hold do |conn|
+ @transactions ||= []
+ if @transactions.include? Thread.current
+ return yield(conn)
+ end
+ conn.query(SQL_BEGIN)
+ begin
+ @transactions << Thread.current
+ result = yield(conn)
+ conn.query(SQL_COMMIT)
+ result
+ rescue => e
+ conn.query(SQL_ROLLBACK)
+ raise e unless SequelRollbackError === e
+ ensure
+ @transactions.delete(Thread.current)
+ end
+ end
+ end
+ end
+
+ class Dataset < Sequel::Dataset
+ def quote_column_ref(c); "`#{c}`"; end
+
+ TRUE = '1'
+ FALSE = '0'
+
+ def literal(v)
+ case v
+ when LiteralString: v
+ when String: "'#{v.gsub(/'|\\/, '\&\&')}'"
+ when true: TRUE
+ when false: FALSE
+ else
+ super
+ end
+ end
+
+ def match_expr(l, r)
+ case r
+ when Regexp:
+ r.casefold? ? \
+ "(#{literal(l)} REGEXP #{literal(r.source)})" :
+ "(#{literal(l)} REGEXP BINARY #{literal(r.source)})"
+ else
+ super
+ end
+ end
+
+ # MySQL supports ORDER and LIMIT clauses in UPDATE statements.
+ def update_sql(values, opts = nil)
+ sql = super
+
+ opts = opts ? @opts.merge(opts) : @opts
+
+ if order = opts[:order]
+ sql << " ORDER BY #{column_list(order)}"
+ end
+
+ if limit = opts[:limit]
+ sql << " LIMIT #{limit}"
+ end
+
+ sql
+ end
+
+ def insert(*values)
+ @db.execute_insert(insert_sql(*values))
+ end
+
+ def update(*args, &block)
+ @db.execute_affected(update_sql(*args, &block))
+ end
+
+ def delete(opts = nil)
+ @db.execute_affected(delete_sql(opts))
+ end
+
+ def fetch_rows(sql)
+ @db.synchronize do
+ r = @db.execute_select(sql)
+ begin
+ @columns = r.columns
+ r.each_hash {|row| yield row}
+ ensure
+ r.free
+ end
+ end
+ self
+ end
+
+ def array_tuples_fetch_rows(sql, &block)
+ @db.synchronize do
+ r = @db.execute_select(sql)
+ begin
+ @columns = r.columns
+ r.each_array(&block)
+ ensure
+ r.free
+ end
+ end
+ self
+ end
+ end
+ end
end
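
For reference, a standalone sketch of the type-conversion logic in Mysql::Result#convert_type above; it is not tied to a live MySQL connection and only reproduces a subset of the MYSQL_TYPES table.

  require "bigdecimal"
  require "bigdecimal/util"   # String#to_d, used for DECIMAL columns

  MYSQL_TYPES = {0 => :to_d, 3 => :to_i, 5 => :to_f}   # subset of the map above

  # Raw MySQL values arrive as strings; coerce them by the column's type code.
  def convert_type(v, type)
    v ? ((t = MYSQL_TYPES[type]) ? v.send(t) : v) : nil
  end

  convert_type("42", 3)     # => 42              (MYSQL_TYPE_LONG)
  convert_type("3.14", 5)   # => 3.14            (MYSQL_TYPE_DOUBLE)
  convert_type("9.99", 0)   # => BigDecimal 9.99 (MYSQL_TYPE_DECIMAL)
  convert_type(nil, 3)      # => nil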
2 lib/sequel/adapters/odbc_mssql.rb
@@ -31,7 +31,7 @@ def select_sql(opts = nil)
# ADD TOP to SELECT string for LIMITS
if limit = opts[:limit]
top = "TOP #{limit} "
- raise SequelError, "Offset not supported" if opts[:offset]
+ raise Sequel::Error::OffsetNotSupported if opts[:offset]
end
columns = opts[:select]
4 lib/sequel/adapters/postgres.rb
@@ -227,7 +227,7 @@ def insert_result(conn, table, values)
# An error could occur if the inserted values include a primary key
# value, while the primary key is serial.
if e.message =~ RE_CURRVAL_ERROR
- raise SequelError, "Could not return primary key value for the inserted record. Are you specifying a primary key value for a serial primary key?"
+ raise Sequel::Error, "Could not return primary key value for the inserted record. Are you specifying a primary key value for a serial primary key?"
else
raise e
end
@@ -283,7 +283,7 @@ def transaction
rescue => e
@logger.info(SQL_ROLLBACK) if @logger
conn.async_exec(SQL_ROLLBACK) rescue nil
- raise e unless SequelRollbackError === e
+ raise e unless Sequel::Error::Rollback === e
ensure
conn.transaction_in_progress = nil
end
8 lib/sequel/adapters/sqlite.rb
@@ -71,7 +71,7 @@ def auto_vacuum
end
def auto_vacuum=(value)
- value = AUTO_VACUUM.index(value) || (raise SequelError, "Invalid value for auto_vacuum option. Please specify one of :none, :full, :incremental.")
+ value = AUTO_VACUUM.index(value) || (raise Sequel::Error, "Invalid value for auto_vacuum option. Please specify one of :none, :full, :incremental.")
pragma_set(:auto_vacuum, value)
end
@@ -82,7 +82,7 @@ def synchronous
end
def synchronous=(value)
- value = SYNCHRONOUS.index(value) || (raise SequelError, "Invalid value for synchronous option. Please specify one of :off, :normal, :full.")
+ value = SYNCHRONOUS.index(value) || (raise Sequel::Error, "Invalid value for synchronous option. Please specify one of :off, :normal, :full.")
pragma_set(:synchronous, value)
end
@@ -93,7 +93,7 @@ def temp_store
end
def temp_store=(value)
- value = TEMP_STORE.index(value) || (raise SequelError, "Invalid value for temp_store option. Please specify one of :default, :file, :memory.")
+ value = TEMP_STORE.index(value) || (raise Sequel::Error, "Invalid value for temp_store option. Please specify one of :default, :file, :memory.")
pragma_set(:temp_store, value)
end
@@ -116,7 +116,7 @@ def transaction(&block)
conn.transaction {result = yield(conn)}
result
rescue => e
- raise e unless SequelRollbackError === e
+ raise e unless Sequel::Error::Rollback === e
end
end
end
590 lib/sequel/array_keys.rb
@@ -1,296 +1,296 @@
-# ArrayKeys provide support for accessing array elements by keys. ArrayKeys are
-# based on the arrayfields gem by Ara Howard, and can be used as substitutes
-# for fetching records tuples as Ruby hashes.
-#
-# The main advantage offered by ArrayKeys over hashes is that the values are
-# always ordered according to the column order in the query. Another purported
-# advantage is that they reduce the memory footprint, but this has turned out
-# to be a false claim.
-module ArrayKeys
- # The KeySet module contains methods that extend an array of keys to return
- # a key's position in the key set.
- module KeySet
- # Returns the key's position in the key set. Provides indifferent access
- # for symbols and strings.
- def key_pos(key)
- @key_indexes ||= inject({}) {|h, k| h[k.to_sym] = h.size; h}
- @key_indexes[key] || @key_indexes[key.to_sym] || @key_indexes[key.to_s]
- end
-
- # Adds a key to the key set.
- def add_key(key)
- self << key
- @key_indexes[key] = @key_indexes.size
- end
-
- # Removes a key from the key set by its index.
- def del_key(idx)
- delete_at(idx)
- @key_indexes = nil # reset key indexes
- end
- end
-
- # The KeyAccess provides a large part of the Hash API for arrays with keys.
- module KeyAccess
- # Returns a value referenced by an array index or a key.
- def [](idx, *args)
- if String === idx or Symbol === idx
- (idx = @keys.key_pos(idx)) ? super(idx, *args) : nil
- else
- super
- end
- end
-
- # Sets the value referenced by an array index or a key.
- def []=(idx,*args)
- if String === idx or Symbol === idx
- idx = @keys.key_pos(idx) || @keys.add_key(idx.to_sym)
- end
- super(idx, *args)
- end
-
- # Stores a value by index or key.
- def store(k, v); self[k] = v; end
-
- # Slices the array, and returns an array with its keys sliced accordingly.
- def slice(*args)
- s = super(*args)
- s.keys = @keys.slice(*args)
- s
- end
-
- # Converts the array into a hash.
- def to_hash
- h = {}
- each_with_index {|v, i| h[@keys[i].to_sym] = v}
- h
- end
- alias_method :to_h, :to_hash
-
- # Iterates over each key-value pair in the array.
- def each_pair
- each_with_index {|v, i| yield @keys[i], v}
- end
-
- # Iterates over the array's associated keys.
- def each_key(&block)
- @keys.each(&block)
- end
-
- # Iterates over the array's values.
- def each_value(&block)
- each(&block)
- end
-
- # Deletes a value by its key.
- def delete(key, *args)
- if (idx = @keys.key_pos(key))
- delete_at(idx)
- end
- end
-
- # Deletes a value by its index.
- def delete_at(idx)
- super(idx)
- @keys = @keys.clone
- @keys.del_key(idx)
- end
-
- # Returns true if the array's key set contains the given key.
- def include?(k)
- @keys.include?(k) || @keys.include?(k.to_sym) || @keys.include?(k.to_s)
- end
-
- # Returns true if the array's key set contains the given key.
- def has_key?(k)
- @keys.include?(k) || @keys.include?(k.to_sym) || @keys.include?(k.to_s)
- end
- alias_method :member?, :has_key?
- alias_method :key?, :has_key?
-
- # Returns true if the array contains the given value.
- def has_value?(k); orig_include?(k); end
- alias_method :value?, :has_value?
-
- # Fetches a value by its key and optionally passes it through the given
- # block:
- #
- # row.fetch(:name) {|v| v.to_sym}
- #
- # You can also give a default value
- #
- # row.fetch(:name, 'untitled')
- #
- def fetch(k, *args, &block)
- if idx = @keys.key_pos(k)
- v = at idx
- else
- !args.empty? ? (v = args.first) : (raise IndexError, "key not found")
- end
- block ? block[v] : v
- end
-
- # Returns self.
- def values
- self
- end
-
- # Creates a copy of self with the same key set.
- def dup
- copy = super
- copy.keys = @keys
- copy
- end
-
- # Creates a copy of self with a copy of the key set.
- def clone
- copy = super
- copy.keys = @keys.clone
- copy
- end
-
- # Returns an array merged from self and the given array.
- def merge(values, &block)
- clone.merge!(values, &block)
- end
-
- # Merges the given array with self, optionally passing the values from self
- # through the given block:
- #
- # row.merge!(new_values) {|k, old, new| (k == :name) ? old : new}
- #
- def merge!(values, &block)
- values.each_pair do |k, v|
- self[k] = (has_key?(k) && block) ? block[k, self[k], v] : v
- end
- self
- end
- alias_method :update, :merge!
- alias_method :update!, :merge!
- end
-
- # The ArrayExtensions module provides extensions for the Array class.
- module ArrayExtensions
- attr_reader :keys
-
- # Sets the key set for the array. Once a key set has been set for an array,
- # it is extended with the KeyAccess API
- def keys=(keys)
- extend ArrayKeys::KeyAccess if keys
- @keys = keys.frozen? ? keys.dup : keys
- unless @keys.respond_to?(:key_pos)
- @keys.extend(ArrayKeys::KeySet)
- end
- end
-
- alias_method :columns, :keys
- alias_method :columns=, :keys=
- end
-
- # The DatasetExtensions module provides extensions that modify
- # a dataset to return Array tuples instead of Hash tuples.
- module DatasetExtensions
- # Fetches a dataset's records, converting each tuple into an array with
- # keys.
- def array_tuples_each(opts = nil, &block)
- fetch_rows(select_sql(opts)) {|h| block[Array.from_hash(h)]}
- end
-
- # Provides the corresponding behavior to Sequel::Dataset#update_each_method,
- # using array tuples.
- def array_tuples_update_each_method
- # warning: ugly code generation ahead
- if @row_proc && @transform
- class << self
- def each(opts = nil, &block)
- if opts && opts[:naked]
- fetch_rows(select_sql(opts)) {|r| block[transform_load(Array.from_hash(r))]}
- else
- fetch_rows(select_sql(opts)) {|r| block[@row_proc[transform_load(Array.from_hash(r))]]}
- end
- self
- end
- end
- elsif @row_proc
- class << self
- def each(opts = nil, &block)
- if opts && opts[:naked]
- fetch_rows(select_sql(opts)) {|r| block[Array.from_hash(r)]}
- else
- fetch_rows(select_sql(opts)) {|r| block[@row_proc[Array.from_hash(r)]]}
- end
- self
- end
- end
- elsif @transform
- class << self
- def each(opts = nil, &block)
- fetch_rows(select_sql(opts)) {|r| block[transform_load(Array.from_hash(r))]}
- self
- end
- end
- else
- class << self
- def each(opts = nil, &block)
- fetch_rows(select_sql(opts)) {|r| block[Array.from_hash(r)]}
- self
- end
- end
- end
- end
- end
-end
-
-# Array extensions.
-class Array
- alias_method :orig_include?, :include?
-
- include ArrayKeys::ArrayExtensions
-
- # Converts a hash into an array with keys.
- def self.from_hash(h)
- a = []; a.keys = []
- a.merge!(h)
- end
-end
-
-module Sequel
- # Modifies all dataset classes to fetch records as arrays with keys. By
- # default records are fetched as hashes.
- def self.use_array_tuples
- Dataset.dataset_classes.each do |c|
- c.class_eval do
- if method_defined?(:array_tuples_fetch_rows)
- alias_method :hash_tuples_fetch_rows, :fetch_rows
- alias_method :fetch_rows, :array_tuples_fetch_rows
- else
- alias_method :orig_each, :each
- alias_method :orig_update_each_method, :update_each_method
- include ArrayKeys::DatasetExtensions
- alias_method :each, :array_tuples_each
- alias_method :update_each_method, :array_tuples_update_each_method
- end
- end
- end
- end
-
- # Modifies all dataset classes to fetch records as hashes.
- def self.use_hash_tuples
- Dataset.dataset_classes.each do |c|
- c.class_eval do
- if method_defined?(:hash_tuples_fetch_rows)
- alias_method :fetch_rows, :hash_tuples_fetch_rows
- else
- if method_defined?(:orig_each)
- alias_method :each, :orig_each
- undef_method :orig_each
- end
- if method_defined?(:orig_update_each_method)
- alias_method :update_each_method, :orig_update_each_method
- undef_method :orig_update_each_method
- end
- end
- end
- end
- end
+# ArrayKeys provide support for accessing array elements by keys. ArrayKeys are
+# based on the arrayfields gem by Ara Howard, and can be used as substitutes
+# for fetching records tuples as Ruby hashes.
+#
+# The main advantage offered by ArrayKeys over hashes is that the values are
+# always ordered according to the column order in the query. Another purported
+# advantage is that they reduce the memory footprint, but this has turned out
+# to be a false claim.
+module ArrayKeys
+ # The KeySet module contains methods that extend an array of keys to return
+ # a key's position in the key set.
+ module KeySet
+ # Returns the key's position in the key set. Provides indifferent access
+ # for symbols and strings.
+ def key_pos(key)
+ @key_indexes ||= inject({}) {|h, k| h[k.to_sym] = h.size; h}
+ @key_indexes[key] || @key_indexes[key.to_sym] || @key_indexes[key.to_s]
+ end
+
+ # Adds a key to the key set.
+ def add_key(key)
+ self << key
+ @key_indexes[key] = @key_indexes.size
+ end
+
+ # Removes a key from the key set by its index.
+ def del_key(idx)
+ delete_at(idx)
+ @key_indexes = nil # reset key indexes
+ end
+ end
+
+ # The KeyAccess provides a large part of the Hash API for arrays with keys.
+ module KeyAccess
+ # Returns a value referenced by an array index or a key.
+ def [](idx, *args)
+ if String === idx or Symbol === idx
+ (idx = @keys.key_pos(idx)) ? super(idx, *args) : nil
+ else
+ super
+ end
+ end
+
+ # Sets the value referenced by an array index or a key.
+ def []=(idx,*args)
+ if String === idx or Symbol === idx
+ idx = @keys.key_pos(idx) || @keys.add_key(idx.to_sym)
+ end
+ super(idx, *args)
+ end
+
+ # Stores a value by index or key.
+ def store(k, v); self[k] = v; end
+
+ # Slices the array, and returns an array with its keys sliced accordingly.
+ def slice(*args)
+ s = super(*args)
+ s.keys = @keys.slice(*args)
+ s
+ end
+
+ # Converts the array into a hash.
+ def to_hash
+ h = {}
+ each_with_index {|v, i| h[@keys[i].to_sym] = v}
+ h
+ end
+ alias_method :to_h, :to_hash
+
+ # Iterates over each key-value pair in the array.
+ def each_pair
+ each_with_index {|v, i| yield @keys[i], v}
+ end
+
+ # Iterates over the array's associated keys.
+ def each_key(&block)
+ @keys.each(&block)
+ end
+
+ # Iterates over the array's values.
+ def each_value(&block)
+ each(&block)
+ end
+
+ # Deletes a value by its key.
+ def delete(key, *args)
+ if (idx = @keys.key_pos(key))
+ delete_at(idx)
+ end
+ end
+
+ # Deletes a value by its index.
+ def delete_at(idx)
+ super(idx)
+ @keys = @keys.clone
+ @keys.del_key(idx)
+ end
+
+ # Returns true if the array's key set contains the given key.
+ def include?(k)
+ @keys.include?(k) || @keys.include?(k.to_sym) || @keys.include?(k.to_s)
+ end
+
+ # Returns true if the array's key set contains the given key.
+ def has_key?(k)
+ @keys.include?(k) || @keys.include?(k.to_sym) || @keys.include?(k.to_s)
+ end
+ alias_method :member?, :has_key?
+ alias_method :key?, :has_key?
+
+ # Returns true if the array contains the given value.
+ def has_value?(k); orig_include?(k); end
+ alias_method :value?, :has_value?
+
+ # Fetches a value by its key and optionally passes it through the given
+ # block:
+ #
+ # row.fetch(:name) {|v| v.to_sym}
+ #
+ # You can also give a default value
+ #
+ # row.fetch(:name, 'untitled')
+ #
+ def fetch(k, *args, &block)
+ if idx = @keys.key_pos(k)
+ v = at idx
+ else
+ !args.empty? ? (v = args.first) : (raise Sequel::Error::Index, "key not found")
+ end
+ block ? block[v] : v
+ end
+
+ # Returns self.
+ def values
+ self
+ end
+
+ # Creates a copy of self with the same key set.
+ def dup
+ copy = super
+ copy.keys = @keys
+ copy
+ end
+
+ # Creates a copy of self with a copy of the key set.
+ def clone
+ copy = super
+ copy.keys = @keys.clone
+ copy
+ end
+
+ # Returns an array merged from self and the given array.
+ def merge(values, &block)
+ clone.merge!(values, &block)
+ end
+
+ # Merges the given array with self, optionally passing the values from self
+ # through the given block:
+ #
+ # row.merge!(new_values) {|k, old, new| (k == :name) ? old : new}
+ #
+ def merge!(values, &block)
+ values.each_pair do |k, v|
+ self[k] = (has_key?(k) && block) ? block[k, self[k], v] : v
+ end
+ self
+ end
+ alias_method :update, :merge!
+ alias_method :update!, :merge!
+ end
+
+ # The ArrayExtensions module provides extensions for the Array class.
+ module ArrayExtensions
+ attr_reader :keys
+
+ # Sets the key set for the array. Once a key set has been set for an array,
+ # it is extended with the KeyAccess API
+ def keys=(keys)
+ extend ArrayKeys::KeyAccess if keys
+ @keys = keys.frozen? ? keys.dup : keys
+ unless @keys.respond_to?(:key_pos)
+ @keys.extend(ArrayKeys::KeySet)
+ end
+ end
+
+ alias_method :columns, :keys
+ alias_method :columns=, :keys=
+ end
+
+ # The DatasetExtensions module provides extensions that modify
+ # a dataset to return Array tuples instead of Hash tuples.
+ module DatasetExtensions
+ # Fetches a dataset's records, converting each tuple into an array with
+ # keys.
+ def array_tuples_each(opts = nil, &block)
+ fetch_rows(select_sql(opts)) {|h| block[Array.from_hash(h)]}
+ end
+
+ # Provides the corresponding behavior to Sequel::Dataset#update_each_method,
+ # using array tuples.
+ def array_tuples_update_each_method
+ # warning: ugly code generation ahead
+ if @row_proc && @transform
+ class << self
+ def each(opts = nil, &block)
+ if opts && opts[:naked]
+ fetch_rows(select_sql(opts)) {|r| block[transform_load(Array.from_hash(r))]}
+ else
+ fetch_rows(select_sql(opts)) {|r| block[@row_proc[transform_load(Array.from_hash(r))]]}
+ end
+ self
+ end
+ end
+ elsif @row_proc
+ class << self
+ def each(opts = nil, &block)
+ if opts && opts[:naked]
+ fetch_rows(select_sql(opts)) {|r| block[Array.from_hash(r)]}
+ else
+ fetch_rows(select_sql(opts)) {|r| block[@row_proc[Array.from_hash(r)]]}
+ end
+ self
+ end
+ end
+ elsif @transform
+ class << self
+ def each(opts = nil, &block)
+ fetch_rows(select_sql(opts)) {|r| block[transform_load(Array.from_hash(r))]}
+ self
+ end
+ end
+ else
+ class << self
+ def each(opts = nil, &block)
+ fetch_rows(select_sql(opts)) {|r| block[Array.from_hash(r)]}
+ self
+ end
+ end
+ end
+ end
+ end
+end
+
+# Array extensions.
+class Array
+ alias_method :orig_include?, :include?
+
+ include ArrayKeys::ArrayExtensions
+
+ # Converts a hash into an array with keys.
+ def self.from_hash(h)
+ a = []; a.keys = []
+ a.merge!(h)
+ end
+end
+
+module Sequel
+ # Modifies all dataset classes to fetch records as arrays with keys. By
+ # default records are fetched as hashes.
+ def self.use_array_tuples
+ Dataset.dataset_classes.each do |c|
+ c.class_eval do
+ if method_defined?(:array_tuples_fetch_rows)
+ alias_method :hash_tuples_fetch_rows, :fetch_rows
+ alias_method :fetch_rows, :array_tuples_fetch_rows
+ else
+ alias_method :orig_each, :each
+ alias_method :orig_update_each_method, :update_each_method
+ include ArrayKeys::DatasetExtensions
+ alias_method :each, :array_tuples_each
+ alias_method :update_each_method, :array_tuples_update_each_method
+ end
+ end
+ end
+ end
+
+ # Modifies all dataset classes to fetch records as hashes.
+ def self.use_hash_tuples
+ Dataset.dataset_classes.each do |c|
+ c.class_eval do
+ if method_defined?(:hash_tuples_fetch_rows)
+ alias_method :fetch_rows, :hash_tuples_fetch_rows
+ else
+ if method_defined?(:orig_each)
+ alias_method :each, :orig_each
+ undef_method :orig_each
+ end
+ if method_defined?(:orig_update_each_method)
+ alias_method :update_each_method, :orig_update_each_method
+ undef_method :orig_update_each_method
+ end
+ end
+ end
+ end
+ end
end
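
A short usage sketch of the ArrayKeys API above (mirroring the examples in spec/array_keys_spec.rb further down), assuming this revision of Sequel is loaded.

  require "sequel"          # pulls in lib/sequel/array_keys.rb

  row = [1, 2, 3]
  row.keys = [:a, :b, :c]   # extends the array with the KeyAccess API

  row[:b]                   # => 2
  row["c"]                  # => 3   (indifferent symbol/string access)
  row.to_hash               # => {:a => 1, :b => 2, :c => 3}
  row.fetch(:d, "missing")  # => "missing"
  row.fetch(:d)             # raises Sequel::Error::Index, "key not found"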
2 lib/sequel/connection_pool.rb
@@ -107,7 +107,7 @@ def make_new
if @created_count < @max_size
@created_count += 1
@connection_proc ? @connection_proc.call : \
- (raise SequelError, "No connection proc specified")
+ (raise Sequel::NoConnectionProcError)
end
end
7 lib/sequel/core_sql.rb
@@ -47,7 +47,12 @@ def lit
# Converts a string into a Time object.
def to_time
- Time.parse(self)
+ begin
+ Time.parse(self)
+ rescue Exception => error
+ raise Sequel::Error::InvalidValue, "Invalid time value '#{self}' (#{error.message})"
+ end
+ # Why does Time.parse('0000-00-00') bork and not return nil or some such?
end
end
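
A sketch of the new behavior (the commit's "Time parsing exception handling"), assuming this revision of Sequel is loaded: Time.parse failures are wrapped in the namespaced exception instead of leaking the underlying parse error.

  require "sequel"

  begin
    "0000-00-00".to_time    # the value mentioned in the comment above; Time.parse rejects it
  rescue Sequel::Error::InvalidValue => e
    puts e.message          # => Invalid time value '0000-00-00' (...)
  end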
12 lib/sequel/database.rb
@@ -31,13 +31,13 @@ def initialize(opts = {}, &block)
# Connects to the database. This method should be overriden by descendants.
def connect
- raise NotImplementedError, "#connect should be overriden by adapters"
+ raise Sequel::Error::NotImplemented, "#connect should be overriden by adapters"
end
# Disconnects from the database. This method should be overriden by
# descendants.
def disconnect
- raise NotImplementedError, "#disconnect should be overriden by adapters"
+ raise Sequel::Error::NotImplemented, "#disconnect should be overriden by adapters"
end
# Returns true if the database is using a multi-threaded connection pool.
@@ -135,9 +135,9 @@ def [](*args)
(String === args.first) ? fetch(*args) : from(*args)
end
- # Raises a NotImplementedError. This method is overriden in descendants.
+ # Raises a Sequel::Error::NotImplemented. This method is overriden in descendants.
def execute(sql)
- raise NotImplementedError
+ raise Sequel::Error::NotImplemented
end
# Executes the supplied SQL statement. The SQL can be supplied as a string
@@ -272,7 +272,7 @@ def transaction
result
rescue => e
conn.execute(SQL_ROLLBACK)
- raise e unless SequelRollbackError === e
+ raise e unless Sequel::Error::Rollback === e
ensure
@transactions.delete(Thread.current)
end
@@ -318,7 +318,7 @@ def self.adapter_class(scheme)
require File.join(File.dirname(__FILE__), "adapters/#{scheme}")
c = @@adapters[scheme.to_sym]
end
- raise SequelError, "Invalid database scheme" unless c
+ raise Sequel::Error::InvaildDatabaseScheme unless c
c
end
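
An illustrative sketch of the renamed rollback exception used in the transaction hunk above; DB and the items dataset are hypothetical placeholders for an already-connected database.

  DB.transaction do
    DB[:items].insert(:name => "temporary")
    # Raising the rollback error aborts the transaction; Database#transaction
    # rescues it and does not re-raise (raise e unless Sequel::Error::Rollback === e).
    raise Sequel::Error::Rollback
  end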
20 lib/sequel/dataset.rb
@@ -111,7 +111,7 @@ def fetch_rows(sql, &block)
# r = @db.execute(sql)
# r.each(&block)
# end
- raise NotImplementedError, NOTIMPL_MSG
+ raise Sequel::Error::NotImplemented, NOTIMPL_MSG
end
# Inserts values into the associated table. Adapters should override this
@@ -120,23 +120,23 @@ def insert(*values)
# @db.synchronize do
# @db.execute(insert_sql(*values)).last_insert_id
# end
- raise NotImplementedError, NOTIMPL_MSG
+ raise Sequel::Error::NotImplemented, NOTIMPL_MSG
end
# Updates values for the dataset. Adapters should override this method.
def update(values, opts = nil)
# @db.synchronize do
# @db.execute(update_sql(values, opts)).affected_rows
# end
- raise NotImplementedError, NOTIMPL_MSG
+ raise Sequel::Error::NotImplemented, NOTIMPL_MSG
end
# Deletes the records in the dataset. Adapters should override this method.
def delete(opts = nil)
# @db.synchronize do
# @db.execute(delete_sql(opts)).affected_rows
# end
- raise NotImplementedError, NOTIMPL_MSG
+ raise Sequel::Error::NotImplemented, NOTIMPL_MSG
end
# Returns the columns in the result set in their true order. The stock
@@ -238,16 +238,16 @@ def set_model(key, *args)
extend_with_destroy
when Symbol:
# polymorphic model
- hash = args.shift || raise(SequelError, "No class hash supplied for polymorphic model")
+ hash = args.shift || raise(Sequel::Error, "No class hash supplied for polymorphic model")
@opts.merge!(:naked => true, :models => hash, :polymorphic_key => key)
set_row_proc do |h|
c = hash[h[key]] || hash[nil] || \
- raise(SequelError, "No matching model class for record (#{polymorphic_key} => #{h[polymorphic_key].inspect})")
+ raise(Sequel::Error, "No matching model class for record (#{polymorphic_key} => #{h[polymorphic_key].inspect})")
c.new(h, *args)
end
extend_with_destroy
else
- raise SequelError, "Invalid parameters specified"
+ raise Sequel::Error::InvalidParameters
end
self
end
@@ -302,11 +302,11 @@ def transform(t)
case v
when Array:
if (v.size != 2) || !v.first.is_a?(Proc) && !v.last.is_a?(Proc)
- raise SequelError, "Invalid transform specified"
+ raise Sequel::Error::InvalidTransform
end
else
unless v = STOCK_TRANSFORMS[v]
- raise SequelError, "Invalid transform specified"
+ raise Sequel::Error::InvalidTransform
else
t[k] = v
end
@@ -384,7 +384,7 @@ def each(opts = nil, &block)
def extend_with_destroy
unless respond_to?(:destroy)
meta_def(:destroy) do
- raise SequelError, 'Dataset not associated with model' unless @opts[:models]
+ raise Sequel::Error::NoDatasetAssociatedWithModel unless @opts[:models]
count = 0
@db.transaction {each {|r| count += 1; r.destroy}}
count
10 lib/sequel/dataset/convenience.rb
@@ -59,7 +59,7 @@ def []=(conditions, values)
# record is returned. Otherwise an array is returned with the last
# <i>num</i> records.
def last(*args)
- raise SequelError, 'No order specified' unless
+ raise Sequel::Error, 'No order specified' unless
@opts[:order] || (opts && opts[:order])
args = args.empty? ? 1 : (args.size == 1) ? args.first : args
@@ -238,10 +238,10 @@ def multi_insert(list, opts = {})
end
module QueryBlockCopy #:nodoc:
- def each(*args); raise SequelError, "#each cannot be invoked inside a query block."; end
- def insert(*args); raise SequelError, "#insert cannot be invoked inside a query block."; end
- def update(*args); raise SequelError, "#update cannot be invoked inside a query block."; end
- def delete(*args); raise SequelError, "#delete cannot be invoked inside a query block."; end
+ def each(*args); raise Sequel::Error, "#each cannot be invoked inside a query block."; end
+ def insert(*args); raise Sequel::Error, "#insert cannot be invoked inside a query block."; end
+ def update(*args); raise Sequel::Error, "#update cannot be invoked inside a query block."; end
+ def delete(*args); raise Sequel::Error, "#delete cannot be invoked inside a query block."; end
def clone_merge(opts)
@opts.merge!(opts)
10 lib/sequel/dataset/sequelizer.rb
@@ -63,7 +63,7 @@ def match_expr(l, r)
when String:
"(#{literal(l)} LIKE #{literal(r)})"
else
- raise SequelError, "Unsupported match pattern class (#{r.class})."
+ raise Sequel::Error::UnsupportedMatchPatternClass, r.class
end
end
@@ -284,11 +284,11 @@ def eval_expr(e, b, opts)
# assignment
l = e[1]
r = eval_expr(e[2], b, opts)
- raise SequelError, "Invalid expression #{l} = #{r}. Did you mean :#{l} == #{r}?"
+ raise Sequel::Error::InvalidExpression, "#{l} = #{r}. Did you mean :#{l} == #{r}?"
when :if, :dstr
ext_expr(e, b, opts)
else
- raise SequelError, "Invalid expression tree: #{e.inspect}"
+ raise Sequel::Error::InvalidExpressionTree, e.inspect
end
end
@@ -338,7 +338,7 @@ def proc_to_sql(proc, opts = {})
rescue Exception
module Sequel::Dataset::Sequelizer
def proc_to_sql(proc)
- raise SequelError, "You must have the ParseTree gem installed in order to use block filters."
+ raise Sequel::Error, "You must have the ParseTree gem installed in order to use block filters."
end
end
end
@@ -348,7 +348,7 @@ def proc_to_sql(proc)
rescue Exception
module Sequel::Dataset::Sequelizer
def ext_expr(e)
- raise SequelError, "You must have the Ruby2Ruby gem installed in order to use this block filter."
+ raise Sequel::Error, "You must have the Ruby2Ruby gem installed in order to use this block filter."
end
end
end
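
A hedged sketch of the block-filter path these fallbacks guard: with the ParseTree and Ruby2Ruby gems installed the block is compiled to SQL, and without them the call now raises Sequel::Error instead of SequelError. DB[:items] is a hypothetical dataset handle and the exact SQL output is illustrative.

  DB[:items].filter {:price < 100}.sql
  # => roughly "SELECT * FROM items WHERE (price < 100)"   (ParseTree installed)
  # => raises Sequel::Error, "You must have the ParseTree gem installed ..." otherwise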
24 lib/sequel/dataset/sql.rb
@@ -45,7 +45,7 @@ def column_list(columns)
# Converts an array of sources names into into a comma separated list.
def source_list(source)
if source.nil? || source.empty?
- raise SequelError, 'No source specified for query'
+ raise Sequel::Error, 'No source specified for query'
end
auto_alias_count = 0
m = source.map do |i|
@@ -98,7 +98,7 @@ def literal(v)
when Date: v.strftime(DATE_FORMAT)
when Dataset: "(#{v.sql})"
else
- raise SequelError, "can't express #{v.inspect} as a SQL literal"
+ raise Sequel::Error, "can't express #{v.inspect} as a SQL literal"
end
end
@@ -204,7 +204,7 @@ def filter(*cond, &block)
clause = (@opts[:group] ? :having : :where)
cond = cond.first if cond.size == 1
if cond === true || cond === false
- raise SequelError, "Invalid filter specified. Did you mean to supply a block?"
+ raise Sequel::Error, "Invalid filter specified. Did you mean to supply a block?"
end
parenthesize = !(cond.is_a?(Hash) || cond.is_a?(Array))
filter = cond.is_a?(Hash) && cond
@@ -228,7 +228,7 @@ def or(*cond, &block)
r = expression_list(block || cond, parenthesize)
clone_merge(clause => "#{l} OR #{r}")
else
- raise SequelError, "No existing filter found."
+ raise Sequel::Error::NoExistingFilter
end
end
@@ -238,7 +238,7 @@ def or(*cond, &block)
def and(*cond, &block)
clause = (@opts[:group] ? :having : :where)
unless @opts[clause]
- raise SequelError, "No existing filter found."
+ raise Sequel::Error::NoExistingFilter
end
filter(*cond, &block)
end
@@ -265,7 +265,7 @@ def exclude(*cond, &block)
# if the dataset has been grouped. See also #filter.
def where(*cond, &block)
if @opts[:group]
- raise SequelError, "Can't specify a WHERE clause once the dataset has been grouped"
+ raise Sequel::Error, "Can't specify a WHERE clause once the dataset has been grouped"
else
filter(*cond, &block)
end
@@ -275,7 +275,7 @@ def where(*cond, &block)
# if the dataset has not been grouped. See also #filter
def having(*cond, &block)
unless @opts[:group]
- raise SequelError, "Can only specify a HAVING clause on a grouped dataset"
+ raise Sequel::Error, "Can only specify a HAVING clause on a grouped dataset"
else
filter(*cond, &block)
end
@@ -310,7 +310,7 @@ def except(dataset, all = false)
def join_expr(type, table, expr)
join_type = JOIN_TYPES[type || :inner]
unless join_type
- raise SequelError, "Invalid join type: #{type}"
+ raise Sequel::Error::InvalidJoinType, type
end
join_conditions = {}
@@ -471,9 +471,9 @@ def update_sql(values = {}, opts = nil, &block)
opts = opts ? @opts.merge(opts) : @opts
if opts[:group]
- raise SequelError, "Can't update a grouped dataset"
+ raise Sequel::Error::UpdateGroupedDataset
elsif (opts[:from].size > 1) or opts[:join]
- raise SequelError, "Can't update a joined dataset"
+ raise Sequel::Error::UpdateJoinedDataset
end
sql = "UPDATE #{@opts[:from]} SET "
@@ -507,9 +507,9 @@ def delete_sql(opts = nil)
opts = opts ? @opts.merge(opts) : @opts
if opts[:group]
- raise SequelError, "Can't delete from a grouped dataset"
+ raise Sequel::Error::DeleteGroupedDataset
elsif opts[:from].is_a?(Array) && opts[:from].size > 1
- raise SequelError, "Can't delete from a joined dataset"
+ raise Sequel::Error::DeleteJoinedDataset
end
sql = "DELETE FROM #{opts[:from]}"
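A hedged sketch of the grouping guards above; DB[:items] is a hypothetical dataset handle and Dataset#group is assumed from this revision's API.

  ds = DB[:items]
  ds.having(:num => 1)
  # raises Sequel::Error, "Can only specify a HAVING clause on a grouped dataset"
  ds.group(:type).where(:num => 1)
  # raises Sequel::Error, "Can't specify a WHERE clause once the dataset has been grouped"
  ds.group(:type).update_sql(:num => 2)
  # raises Sequel::Error::UpdateGroupedDataset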
22 lib/sequel/error.rb
@@ -1,22 +0,0 @@
-# Represents an error raised in Sequel code.
-class SequelError < StandardError
-end
-
-# SequelRollbackError is a special error used to rollback a transactions.
-# A transaction block will catch this error and wont pass further up the stack.
-class SequelRollbackError < StandardError
-end
-
-# Object extensions
-class Object
- # Cancels the current transaction without an error:
- #
- # DB.tranaction do
- # ...
- # rollback! if failed_to_contact_client
- # ...
- # end
- def rollback!
- raise SequelRollbackError
- end
-end
91 lib/sequel/exceptions.rb
@@ -0,0 +1,91 @@
+module Sequel
+ # Represents an error raised in Sequel code.
+ class Error < StandardError
+
+ # Rollback is a special error used to rollback a transactions.
+ # A transaction block will catch this error and wont pass further up the stack.
+ class Rollback < Error ; end
+
+ # Represents an invalid value stored in the database.
+ class InvalidValue < Error ; end
+
+ # Represents invalid arguments passed to sequel methods.
+ class Argument < Error ; end
+
+ # Represents Invalid parameters passed to a sequel method.
+ class InvalidParameters < Error ; end
+
+ # Represents an Invalid transform.
+ class InvalidTransform < Error ; end
+
+ # Represents an Invalid filter.
+ class InvalidFilter < Error ; end
+
+ # Represents a failure to provide a connection proc for the connection pool.
+ class NoConnectionProc < Error ; end
+
+ # Represents missing a required connection string.
+ class NoConnectionString < Error ; end
+
+ # Represents an attempt to perform filter operations when no filter has been specified yet.
+ class NoExistingFilter < Error ; end
+
+ # Represents an invalid join type.
+ class InvalidJoinType < Error ; end
+
+ # Represents an attempt to perform an update on a grouped dataset.
+ class UpdateGroupedDataset < Error ; end
+
+ # Represents an attempt to perform an update on a joined dataset.
+ class UpdateJoinedDataset < Error ; end
+
+ # Represents an attempt to perform a delete from a grouped dataset.
+ class DeleteGroupedDataset < Error ; end
+
+ # Represents an attempt to perform a delete from a joined dataset.
+ class DeleteJoinedDataset < Error ; end
+
+ class InvalidMigrationDirection < Error ; end
+
+ class NoCurrentVersionAvailable < Error ; end
+
+ class NoTargetVersionAvailable < Error ; end
+
+ class OffsetNotSupported < Error ; end
+
+ # Represents a model that has no associated dataset.
+ class NoDatasetAssociatedWithModel < Error ; end
+
+ # Represents a model with no primary key specified.
+ class NoPrimaryKeyForModel < Error ; end
+
+ class UnsupportedMatchPatternClass < Error ; end
+
+ class Index < Error ; end
+
+ class Name < Error ; end
+
+ class InvalidExpression < Error ; end
+
+ class InvalidExpressionTree < Error ; end
+
+ class ChainBroken < RuntimeError ; end
+
+ class WorkerStopError < RuntimeError ; end
+
+ end
+end
+
+# Object extensions
+class Object
+ # Cancels the current transaction without an error:
+ #
+ # DB.tranaction do
+ # ...
+ # rollback! if failed_to_contact_client
+ # ...
+ # end
+ def rollback!
+ raise Sequel::RollbackError
+ end
+end
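
One detail of the new hierarchy worth illustrating with a minimal, self-contained check (assuming the file above is loaded): ChainBroken and WorkerStopError derive from RuntimeError rather than Sequel::Error, so a rescue on Sequel::Error will not catch them.

  begin
    raise Sequel::Error::ChainBroken
  rescue Sequel::Error
    puts "not reached"
  rescue RuntimeError => e
    puts "caught #{e.class}"   # => caught Sequel::Error::ChainBroken
  end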
6 lib/sequel/migration.rb
@@ -53,7 +53,7 @@ def self.apply(db, direction)
when :up: obj.up
when :down: obj.down
else
- raise SequelError, "Invalid migration direction (#{direction})"
+ raise Sequel::Error::InvalidMigrationDirection, direction
end
end
@@ -104,8 +104,8 @@ def self.apply(db, directory, target = nil, current = nil)
# determine current and target version and direction
current ||= get_current_migration_version(db)
target ||= latest_migration_version(directory)
- raise SequelError, "No current version available" if current.nil?
- raise SequelError, "No target version available" if target.nil?
+ raise Sequel::Error::NoCurrentVersionAvailable if current.nil?
+ raise Sequel::Error::NoTargetVersionAvailable if target.nil?
direction = current < target ? :up : :down
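
A hedged sketch of the direction check above; DB stands in for an already-connected database and the migration subclass is illustrative only.

  class CreateItems < Sequel::Migration
    def up;   puts "up";   end
    def down; puts "down"; end
  end

  CreateItems.apply(DB, :up)         # runs #up
  CreateItems.apply(DB, :sideways)   # raises Sequel::Error::InvalidMigrationDirection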
2 lib/sequel/model.rb
@@ -256,7 +256,7 @@ def self.find(*args, &block)
def self.[](*args)
args = args.first if (args.size == 1)
if args === true || args === false
- raise SequelError, "Invalid filter specified. Did you mean to supply a hash?"
+ raise Sequel::Error::InvalidFilter, "Did you mean to supply a hash?"
end
dataset[(Hash === args) ? args : primary_key_hash(args)]
end
6 lib/sequel/model/base.rb
@@ -3,7 +3,7 @@ class Model
# Returns the database associated with the Model class.
def self.db
@db ||= (superclass != Object) && superclass.db or
- raise SequelError, "No database associated with #{self}"
+ raise Sequel::Error, "No database associated with #{self}"
end
# Sets the database associated with the Model class.
@@ -20,7 +20,7 @@ def self.database_opened(db)
# Returns the dataset associated with the Model class.
def self.dataset
@dataset || super_dataset or
- raise SequelError, "No dataset associated with #{self}"
+ raise Sequel::Error, "No dataset associated with #{self}"
end
def self.super_dataset # :nodoc:
@@ -32,7 +32,7 @@ def self.super_dataset # :nodoc:
# See Dataset#columns for more information.
def self.columns
@columns ||= @dataset.columns or
- raise SequelError, "Could not fetch columns for #{self}"
+ raise Sequel::Error, "Could not fetch columns for #{self}"
end
# Sets the dataset associated with the Model class.
4 lib/sequel/model/hooks.rb
@@ -1,9 +1,5 @@
module Sequel
class Model
-
- class ChainBroken < RuntimeError # :nodoc:
- end
-
# This Hash translates verbs to methodnames used in chain manipulation
# methods.
VERB_TO_METHOD = {:prepend => :unshift, :append => :push}
18 lib/sequel/model/record.rb
@@ -87,7 +87,7 @@ def self.set_primary_key(*key)
@pk ||= {key => @values[key]}
end
class_def(:cache_key) do
- pk = @values[key] || (raise SequelError, 'no primary key for this record')
+ pk = @values[key] || (raise Sequel::Error, 'no primary key for this record')
@cache_key ||= "#{self.class}:#{pk}"
end
meta_def(:primary_key_hash) do |v|
@@ -118,11 +118,11 @@ def self.set_primary_key(*key)
def self.no_primary_key #:nodoc:
meta_def(:primary_key) {nil}
- meta_def(:primary_key_hash) {|v| raise SequelError, "#{self} does not have a primary key"}
- class_def(:this) {raise SequelError, "No primary key is associated with this model"}
- class_def(:pk) {raise SequelError, "No primary key is associated with this model"}
- class_def(:pk_hash) {raise SequelError, "No primary key is associated with this model"}
- class_def(:cache_key) {raise SequelError, "No primary key is associated with this model"}
+ meta_def(:primary_key_hash) {|v| raise Sequel::Error, "#{self} does not have a primary key"}
+ class_def(:this) {raise Sequel::Error::NoPrimaryKeyForModel}
+ class_def(:pk) {raise Sequel::Error::NoPrimaryKeyForModel}
+ class_def(:pk_hash) {raise Sequel::Error::NoPrimaryKeyForModel}
+ class_def(:cache_key) {raise Sequel::Error::NoPrimaryKeyForModel}
end
# Creates new instance with values set to passed-in Hash ensuring that
@@ -149,7 +149,7 @@ def this
# Returns a key unique to the underlying record for caching
def cache_key
- pk = @values[:id] || (raise SequelError, 'no primary key for this record')
+ pk = @values[:id] || (raise Sequel::Error, 'no primary key for this record')
@cache_key ||= "#{self.class}:#{pk}"
end
@@ -251,7 +251,7 @@ def set(values)
# Reloads values from database and returns self.
def refresh
- @values = this.first || raise(SequelError, "Record not found")
+ @values = this.first || raise(Sequel::Error, "Record not found")
self
end
@@ -285,7 +285,7 @@ def method_missing(m, *args) #:nodoc:
end
# otherwise, raise an error
- raise SequelError, "Invalid column (#{att.inspect}) for #{self}"
+ raise Sequel::Error, "Invalid column (#{att.inspect}) for #{self}"
end
# define the column accessor
4 lib/sequel/model/relations.rb
@@ -35,7 +35,7 @@ def self.one_to_one(name, opts)
end
from = opts[:from]
- from || (raise SequelError, "No association source defined (use :from option)")
+ from || (raise Sequel::Error, "No association source defined (use :from option)")
key = opts[:key] || (name.to_s + ID_POSTFIX).to_sym
setter_name = "#{name}=".to_sym
@@ -93,7 +93,7 @@ def self.one_to_many(name, opts)
from = opts[:from]
- from || (raise SequelError, "No association source defined (use :from option)")
+ from || (raise Sequel::Error, "No association source defined (use :from option)")
key = opts[:key] || (self.to_s + ID_POSTFIX).to_sym
case from
15 lib/sequel/worker.rb
@@ -1,9 +1,9 @@
-require 'thread'
+require "thread"
module Sequel
+
class Worker < Thread
- class WorkerStopError < RuntimeError; end
-
+
attr_reader :queue
attr_reader :errors
@@ -18,7 +18,7 @@ def initialize(db = nil)
def work
loop {next_job}
- rescue WorkerStopError # signals the worker thread to stop
+ rescue Sequel::Error::WorkerStop # signals the worker thread to stop
ensure
rollback! if @transaction && !@errors.empty?
end
@@ -38,20 +38,21 @@ def join
while busy?
sleep 0.1
end
- self.raise WorkerStopError
+ self.raise Sequel::Error::WorkerStop
super
end
private
def next_job
@cur = @queue.pop
@cur.call
- rescue WorkerStopError => e
+ rescue Sequel::Error::WorkerStop => e
raise e
rescue Exception => e
@errors << e
ensure
@cur = nil
end
end
-end
+
+end
10 spec/adapters/sqlite_spec.rb
@@ -41,7 +41,7 @@
@db.auto_vacuum = :none
@db.auto_vacuum.should == :none
- proc {@db.auto_vacuum = :invalid}.should raise_error(SequelError)
+ proc {@db.auto_vacuum = :invalid}.should raise_error(Sequel::Error)
end
specify "should support getting and setting the synchronous pragma" do
@@ -52,7 +52,7 @@
@db.synchronous = :full
@db.synchronous.should == :full
- proc {@db.synchronous = :invalid}.should raise_error(SequelError)
+ proc {@db.synchronous = :invalid}.should raise_error(Sequel::Error)
end
specify "should support getting and setting the temp_store pragma" do
@@ -63,7 +63,7 @@
@db.temp_store = :memory
@db.temp_store.should == :memory
- proc {@db.temp_store = :invalid}.should raise_error(SequelError)
+ proc {@db.temp_store = :invalid}.should raise_error(Sequel::Error)
end
specify "should be able to execute multiple statements at once" do
@@ -87,8 +87,8 @@
proc {@db.transaction do
@db.create_table(:u) {text :name}
- raise ArgumentError
- end}.should raise_error(ArgumentError)
+ raise Sequel::Error::Argument
+ end}.should raise_error(Sequel::Error::Argument)
# no commit
@db.tables.should == [:t]
1,086 spec/array_keys_spec.rb
@@ -1,544 +1,544 @@
-require File.join(File.dirname(__FILE__), 'spec_helper')
-
-context "An array with symbol keys" do
- setup do
- @a = [1, 2, 3]
- @a.keys = [:a, :b, :c]
- end
-
- specify "should provide subscript access" do
- @a[0].should == 1
- @a[0..1].should == [1, 2]
-
- @a[1] = 4
- @a.should == [1, 4, 3]
- end
-
- specify "should provide key access using symbols" do
- @a[:a].should == 1
- @a[:b].should == 2
- @a[:B].should == nil
-
- @a[:a] = 11
- @a.should == [11, 2, 3]
- @a[:a].should == 11
-
- @a[:d] = 4
- @a.should == [11, 2, 3, 4]
- @a.keys.should == [:a, :b, :c, :d]
- end
-
- specify "should provide key access using strings" do
- @a['a'].should == 1
- @a['A'].should be_nil
-
- @a['d'] = 4
- @a.should == [1, 2, 3, 4]
- @a.keys.should == [:a, :b, :c, :d]
- end
-
- specify "should provide #store functionality" do
- @a.store(:a, 11)
- @a.should == [11, 2, 3]
-
- @a.store(:d, 4)
- @a.should == [11, 2, 3, 4]
-
- @a.store('d', 44)
- @a.should == [11, 2, 3, 44]
- end
-
- specify "should provide #to_hash/#to_h functionality" do
- @a.to_hash.should == {:a => 1, :b => 2, :c => 3}
- @a.to_h.should == {:a => 1, :b => 2, :c => 3}
- end
-
- specify "should provide #columns as alias to #keys" do
- @a.columns.should == [:a, :b, :c]
- @a.columns = [:x, :y, :z]
-
- @a[:x].should == 1
- end
-
- specify "should provide #slice functionality with keys" do
- s = @a.slice(0, 2)
- s.should == [1, 2]
- s.keys.should == [:a, :b]
-
- s = @a.slice(1..2)
- s.should == [2, 3]
- s.keys.should == [:b, :c]
- end
-
- specify "should provide #each_pair iterator" do
- pairs = []
- @a.each_pair {|k, v| pairs << [k, v]}
- pairs.should == [[:a, 1], [:b, 2], [:c, 3]]
- end
-
- specify "should provide stock #delete functionality for arrays without keys" do
- a = [1, 2, 3]
- a.delete(2)
- a.should == [1, 3]
- end
-
- specify "should provide key-based #delete functionality" do
- @a.delete(:b)
- @a.should == [1, 3]
- @a.keys.should == [:a, :c]
- @a[:a].should == 1
- @a[:c].should == 3
- end
-
- specify "should separate array keys after #delete/#delete_at" do
- b = @a.dup
-
- b.delete(:b)
-
- @a.keys.should == [:a, :b, :c]
- b.keys.should == [:a, :c]
- @a.should == [1, 2, 3]
- b.should == [1, 3]
- @a[:b].should == 2
- b[:b].should == nil
- end
-
- specify "should provide #each_key functionality" do
- keys = []
- @a.each_key {|k| keys << k}
- keys.should == [:a, :b, :c]
- end
-
- specify "should provide #each_value functionality" do
- values = []
- @a.each_value {|v| values << v}
- values.should == [1, 2, 3]
- end
-
- specify "should provide stock #include? functionality for arrays without keys" do
- [1, 2, 3].include?(2).should be_true
- [1, 2, 3].include?(4).should be_false
- end
-
- specify "should provide #has_key?/#member?/#key?/#include? functionality" do
- @a.has_key?(:a).should be_true
- @a.has_key?(:b).should be_true
- @a.has_key?(:c).should be_true
- @a.has_key?(:B).should be_false
- @a.has_key?(:d).should be_false
-
- @a.has_key?('a').should be_true
- @a.has_key?('b').should be_true
- @a.has_key?('c').should be_true
- @a.has_key?('A').should be_false
- @a.has_key?('d').should be_false
-
- @a.key?(:a).should be_true
- @a.key?(:b).should be_true
- @a.key?(:c).should be_true
- @a.key?(:B).should be_false
- @a.key?(:d).should be_false
-
- @a.key?('a').should be_true
- @a.key?('b').should be_true
- @a.key?('c').should be_true
- @a.key?('A').should be_false
- @a.key?('d').should be_false
-
- @a.member?(:a).should be_true
- @a.member?(:b).should be_true
- @a.member?(:c).should be_true
- @a.member?(:B).should be_false
- @a.member?(:d).should be_false
-
- @a.member?('a').should be_true
- @a.member?('b').should be_true
- @a.member