From bfd5e5f23d046a7f70002b44b58b1fde683ba6b0 Mon Sep 17 00:00:00 2001 From: rick Date: Tue, 4 Nov 2008 00:02:23 -0800 Subject: [PATCH] freeze dm-core too --- vendor/dm-core-0.9.6/.autotest | 26 + vendor/dm-core-0.9.6/CONTRIBUTING | 51 + vendor/dm-core-0.9.6/FAQ | 92 ++ vendor/dm-core-0.9.6/History.txt | 145 ++ vendor/dm-core-0.9.6/MIT-LICENSE | 22 + vendor/dm-core-0.9.6/Manifest.txt | 128 ++ vendor/dm-core-0.9.6/QUICKLINKS | 12 + vendor/dm-core-0.9.6/README.txt | 143 ++ vendor/dm-core-0.9.6/Rakefile | 30 + vendor/dm-core-0.9.6/SPECS | 63 + vendor/dm-core-0.9.6/TODO | 1 + vendor/dm-core-0.9.6/lib/dm-core.rb | 217 +++ vendor/dm-core-0.9.6/lib/dm-core/adapters.rb | 22 + .../lib/dm-core/adapters/abstract_adapter.rb | 209 +++ .../dm-core/adapters/data_objects_adapter.rb | 707 +++++++++ .../lib/dm-core/adapters/in_memory_adapter.rb | 87 ++ .../lib/dm-core/adapters/mysql_adapter.rb | 136 ++ .../lib/dm-core/adapters/postgres_adapter.rb | 188 +++ .../lib/dm-core/adapters/sqlite3_adapter.rb | 105 ++ .../dm-core-0.9.6/lib/dm-core/associations.rb | 200 +++ .../lib/dm-core/associations/many_to_many.rb | 147 ++ .../lib/dm-core/associations/many_to_one.rb | 107 ++ .../lib/dm-core/associations/one_to_many.rb | 318 ++++ .../lib/dm-core/associations/one_to_one.rb | 61 + .../lib/dm-core/associations/relationship.rb | 223 +++ .../associations/relationship_chain.rb | 81 + .../lib/dm-core/auto_migrations.rb | 113 ++ .../dm-core-0.9.6/lib/dm-core/collection.rb | 642 ++++++++ .../lib/dm-core/dependency_queue.rb | 32 + vendor/dm-core-0.9.6/lib/dm-core/hook.rb | 11 + .../dm-core-0.9.6/lib/dm-core/identity_map.rb | 42 + vendor/dm-core-0.9.6/lib/dm-core/is.rb | 16 + vendor/dm-core-0.9.6/lib/dm-core/logger.rb | 232 +++ .../migrations/destructive_migrations.rb | 17 + vendor/dm-core-0.9.6/lib/dm-core/migrator.rb | 29 + vendor/dm-core-0.9.6/lib/dm-core/model.rb | 488 ++++++ .../lib/dm-core/naming_conventions.rb | 84 + vendor/dm-core-0.9.6/lib/dm-core/property.rb | 663 ++++++++ 
.../dm-core-0.9.6/lib/dm-core/property_set.rb | 169 ++ vendor/dm-core-0.9.6/lib/dm-core/query.rb | 627 ++++++++ .../dm-core-0.9.6/lib/dm-core/repository.rb | 159 ++ vendor/dm-core-0.9.6/lib/dm-core/resource.rb | 637 ++++++++ vendor/dm-core-0.9.6/lib/dm-core/scope.rb | 58 + vendor/dm-core-0.9.6/lib/dm-core/support.rb | 7 + .../lib/dm-core/support/array.rb | 13 + .../lib/dm-core/support/assertions.rb | 8 + .../lib/dm-core/support/errors.rb | 23 + .../lib/dm-core/support/kernel.rb | 11 + .../lib/dm-core/support/symbol.rb | 41 + .../dm-core-0.9.6/lib/dm-core/transaction.rb | 267 ++++ vendor/dm-core-0.9.6/lib/dm-core/type.rb | 160 ++ vendor/dm-core-0.9.6/lib/dm-core/type_map.rb | 80 + vendor/dm-core-0.9.6/lib/dm-core/types.rb | 19 + .../lib/dm-core/types/boolean.rb | 7 + .../lib/dm-core/types/discriminator.rb | 34 + .../dm-core-0.9.6/lib/dm-core/types/object.rb | 24 + .../lib/dm-core/types/paranoid_boolean.rb | 34 + .../lib/dm-core/types/paranoid_datetime.rb | 33 + .../dm-core-0.9.6/lib/dm-core/types/serial.rb | 9 + .../dm-core-0.9.6/lib/dm-core/types/text.rb | 10 + vendor/dm-core-0.9.6/lib/dm-core/version.rb | 3 + vendor/dm-core-0.9.6/script/all | 5 + vendor/dm-core-0.9.6/script/performance.rb | 284 ++++ vendor/dm-core-0.9.6/script/profile.rb | 87 ++ .../spec/integration/association_spec.rb | 1383 +++++++++++++++++ .../integration/association_through_spec.rb | 203 +++ .../associations/many_to_many_spec.rb | 449 ++++++ .../associations/many_to_one_spec.rb | 163 ++ .../associations/one_to_many_spec.rb | 188 +++ .../spec/integration/auto_migrations_spec.rb | 398 +++++ .../spec/integration/collection_spec.rb | 1069 +++++++++++++ .../integration/data_objects_adapter_spec.rb | 32 + .../spec/integration/dependency_queue_spec.rb | 46 + .../spec/integration/model_spec.rb | 127 ++ .../spec/integration/mysql_adapter_spec.rb | 85 + .../spec/integration/postgres_adapter_spec.rb | 731 +++++++++ .../spec/integration/property_spec.rb | 233 +++ .../spec/integration/query_spec.rb | 506 
++++++ .../spec/integration/repository_spec.rb | 57 + .../spec/integration/resource_spec.rb | 475 ++++++ .../spec/integration/sqlite3_adapter_spec.rb | 352 +++++ .../spec/integration/sti_spec.rb | 230 +++ .../strategic_eager_loading_spec.rb | 153 ++ .../spec/integration/transaction_spec.rb | 75 + .../spec/integration/type_spec.rb | 271 ++++ .../dm-core-0.9.6/spec/lib/logging_helper.rb | 18 + vendor/dm-core-0.9.6/spec/lib/mock_adapter.rb | 27 + vendor/dm-core-0.9.6/spec/lib/model_loader.rb | 100 ++ .../spec/lib/publicize_methods.rb | 28 + vendor/dm-core-0.9.6/spec/models/content.rb | 16 + vendor/dm-core-0.9.6/spec/models/vehicles.rb | 34 + vendor/dm-core-0.9.6/spec/models/zoo.rb | 47 + vendor/dm-core-0.9.6/spec/spec.opts | 3 + vendor/dm-core-0.9.6/spec/spec_helper.rb | 86 + .../unit/adapters/abstract_adapter_spec.rb | 133 ++ .../spec/unit/adapters/adapter_shared_spec.rb | 15 + .../adapters/data_objects_adapter_spec.rb | 628 ++++++++ .../unit/adapters/in_memory_adapter_spec.rb | 98 ++ .../unit/adapters/postgres_adapter_spec.rb | 133 ++ .../unit/associations/many_to_many_spec.rb | 32 + .../unit/associations/many_to_one_spec.rb | 152 ++ .../unit/associations/one_to_many_spec.rb | 393 +++++ .../spec/unit/associations/one_to_one_spec.rb | 7 + .../unit/associations/relationship_spec.rb | 71 + .../spec/unit/associations_spec.rb | 242 +++ .../spec/unit/auto_migrations_spec.rb | 111 ++ .../spec/unit/collection_spec.rb | 182 +++ .../spec/unit/data_mapper_spec.rb | 35 + .../spec/unit/identity_map_spec.rb | 126 ++ vendor/dm-core-0.9.6/spec/unit/is_spec.rb | 80 + .../dm-core-0.9.6/spec/unit/migrator_spec.rb | 33 + vendor/dm-core-0.9.6/spec/unit/model_spec.rb | 323 ++++ .../spec/unit/naming_conventions_spec.rb | 36 + .../spec/unit/property_set_spec.rb | 90 ++ .../dm-core-0.9.6/spec/unit/property_spec.rb | 753 +++++++++ vendor/dm-core-0.9.6/spec/unit/query_spec.rb | 571 +++++++ .../spec/unit/repository_spec.rb | 93 ++ .../dm-core-0.9.6/spec/unit/resource_spec.rb | 635 ++++++++ 
vendor/dm-core-0.9.6/spec/unit/scope_spec.rb | 142 ++ .../spec/unit/transaction_spec.rb | 493 ++++++ .../dm-core-0.9.6/spec/unit/type_map_spec.rb | 114 ++ vendor/dm-core-0.9.6/spec/unit/type_spec.rb | 119 ++ vendor/dm-core-0.9.6/tasks/ci.rb | 36 + vendor/dm-core-0.9.6/tasks/dm.rb | 63 + vendor/dm-core-0.9.6/tasks/doc.rb | 20 + vendor/dm-core-0.9.6/tasks/gemspec.rb | 23 + vendor/dm-core-0.9.6/tasks/hoe.rb | 46 + vendor/dm-core-0.9.6/tasks/install.rb | 20 + 128 files changed, 22332 insertions(+) create mode 100644 vendor/dm-core-0.9.6/.autotest create mode 100644 vendor/dm-core-0.9.6/CONTRIBUTING create mode 100644 vendor/dm-core-0.9.6/FAQ create mode 100644 vendor/dm-core-0.9.6/History.txt create mode 100644 vendor/dm-core-0.9.6/MIT-LICENSE create mode 100644 vendor/dm-core-0.9.6/Manifest.txt create mode 100644 vendor/dm-core-0.9.6/QUICKLINKS create mode 100644 vendor/dm-core-0.9.6/README.txt create mode 100755 vendor/dm-core-0.9.6/Rakefile create mode 100644 vendor/dm-core-0.9.6/SPECS create mode 100644 vendor/dm-core-0.9.6/TODO create mode 100644 vendor/dm-core-0.9.6/lib/dm-core.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/abstract_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/data_objects_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/in_memory_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/mysql_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/postgres_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/adapters/sqlite3_adapter.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_many.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_one.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_many.rb create mode 100644 
vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_one.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations/relationship.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/associations/relationship_chain.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/auto_migrations.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/collection.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/dependency_queue.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/hook.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/identity_map.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/is.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/logger.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/migrations/destructive_migrations.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/migrator.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/model.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/naming_conventions.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/property.rb create mode 100755 vendor/dm-core-0.9.6/lib/dm-core/property_set.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/query.rb create mode 100755 vendor/dm-core-0.9.6/lib/dm-core/repository.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/resource.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/scope.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support/array.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support/assertions.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support/errors.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support/kernel.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/support/symbol.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/transaction.rb create mode 100755 vendor/dm-core-0.9.6/lib/dm-core/type.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/type_map.rb create mode 100644 
vendor/dm-core-0.9.6/lib/dm-core/types.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/boolean.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/discriminator.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/object.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_boolean.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_datetime.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/serial.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/types/text.rb create mode 100644 vendor/dm-core-0.9.6/lib/dm-core/version.rb create mode 100755 vendor/dm-core-0.9.6/script/all create mode 100755 vendor/dm-core-0.9.6/script/performance.rb create mode 100755 vendor/dm-core-0.9.6/script/profile.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/association_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/association_through_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/associations/many_to_many_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/associations/many_to_one_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/associations/one_to_many_spec.rb create mode 100755 vendor/dm-core-0.9.6/spec/integration/auto_migrations_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/collection_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/data_objects_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/dependency_queue_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/model_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/mysql_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/postgres_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/property_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/query_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/repository_spec.rb create 
mode 100644 vendor/dm-core-0.9.6/spec/integration/resource_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/sqlite3_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/sti_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/strategic_eager_loading_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/transaction_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/integration/type_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/lib/logging_helper.rb create mode 100644 vendor/dm-core-0.9.6/spec/lib/mock_adapter.rb create mode 100644 vendor/dm-core-0.9.6/spec/lib/model_loader.rb create mode 100644 vendor/dm-core-0.9.6/spec/lib/publicize_methods.rb create mode 100644 vendor/dm-core-0.9.6/spec/models/content.rb create mode 100644 vendor/dm-core-0.9.6/spec/models/vehicles.rb create mode 100644 vendor/dm-core-0.9.6/spec/models/zoo.rb create mode 100644 vendor/dm-core-0.9.6/spec/spec.opts create mode 100644 vendor/dm-core-0.9.6/spec/spec_helper.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/adapters/abstract_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/adapters/adapter_shared_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/adapters/data_objects_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/adapters/in_memory_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/adapters/postgres_adapter_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations/many_to_many_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations/many_to_one_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations/one_to_many_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations/one_to_one_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations/relationship_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/associations_spec.rb create mode 100644 
vendor/dm-core-0.9.6/spec/unit/auto_migrations_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/collection_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/data_mapper_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/identity_map_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/is_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/migrator_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/model_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/naming_conventions_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/property_set_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/property_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/query_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/repository_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/resource_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/scope_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/transaction_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/type_map_spec.rb create mode 100644 vendor/dm-core-0.9.6/spec/unit/type_spec.rb create mode 100644 vendor/dm-core-0.9.6/tasks/ci.rb create mode 100644 vendor/dm-core-0.9.6/tasks/dm.rb create mode 100644 vendor/dm-core-0.9.6/tasks/doc.rb create mode 100644 vendor/dm-core-0.9.6/tasks/gemspec.rb create mode 100644 vendor/dm-core-0.9.6/tasks/hoe.rb create mode 100644 vendor/dm-core-0.9.6/tasks/install.rb diff --git a/vendor/dm-core-0.9.6/.autotest b/vendor/dm-core-0.9.6/.autotest new file mode 100644 index 0000000..553a644 --- /dev/null +++ b/vendor/dm-core-0.9.6/.autotest @@ -0,0 +1,26 @@ +Autotest.add_hook :initialize do |at| + ignore = %w[ .git burn www log plugins script tasks bin CHANGELOG FAQ MIT-LICENSE PERFORMANCE QUICKLINKS README ] + + unless ENV['AUTOTEST'] == 'integration' + ignore << 'spec/integration' + end + + ignore.each do |exception| + at.add_exception(exception) + end + + at.clear_mappings + + 
at.add_mapping(%r{^spec/.+_spec\.rb$}) do |filename,_| + filename + end + + at.add_mapping(%r{^lib/data_mapper/(.+)\.rb$}) do |_,match| + [ "spec/unit/#{match[1]}_spec.rb" ] + + at.files_matching(%r{^spec/integration/.+_spec\.rb$}) + end + + at.add_mapping(%r{^spec/spec_helper\.rb$}) do + at.files_matching(%r{^spec/.+_spec\.rb$}) + end +end diff --git a/vendor/dm-core-0.9.6/CONTRIBUTING b/vendor/dm-core-0.9.6/CONTRIBUTING new file mode 100644 index 0000000..dd00b29 --- /dev/null +++ b/vendor/dm-core-0.9.6/CONTRIBUTING @@ -0,0 +1,51 @@ +# NOTE: This is a work in progress. As of July 24, it applies only to dm-core. + +# Contributing to Edge DataMapper + +We have now implemented Hoe throughout the DataMapper suite, so there will be a +handful of new procedures for contributing to our git repositories. I'll give +you a run through of how to set up your machine, and then provide a few +commands that should be run before committing or pushing changes. + +## Installing and configuring Hoe + +The first step is to install hoe. You'll need at least version 1.7.0. + + (sudo) gem install hoe --include-dependencies + +Now you'll need to configure hoe. You'll need to run this from inside of +dm-core, or one of the other DataMapper projects. + + rake config_hoe + +The only thing you should need to change is the exclude regular expression, +which needs to look like this: + + exclude: !ruby/regexp /tmp$|CVS|\.svn|\.git|.+\.gemspec/ + +Now you have the correct setup for contributing. + +## Before committing changes + +Before you commit changes, you must verify that `Manifest.txt` (the file which +contains the names of every file to be included in a gem release) and +`[project-name].gemspec` are up to date. We have create a rake task to make +this easy: + + rake gemspec + +This will check `Manifest.txt` (using Hoe's `rake check_manifest`) to ensure +there are no differences between the files in the project, and those listed in +the manifest. 
If there is a difference, it will display a warning and a list of +the differences in `diff` format. + +If the changes in the diff are correct, then you can run the following command +to update the manifest. + + rake check_manifest | patch + +If there are files you do not want added to the manifest, then you should +remove the files from the project, and then run `rake gemspec` again. + +If `rake gemspec` says it was successful, then you can proceed with committing +and pushing your changes. \ No newline at end of file diff --git a/vendor/dm-core-0.9.6/FAQ b/vendor/dm-core-0.9.6/FAQ new file mode 100644 index 0000000..79a39bf --- /dev/null +++ b/vendor/dm-core-0.9.6/FAQ @@ -0,0 +1,92 @@ +:include:QUICKLINKS + += FAQ + +=== So where's my :id column? + +DataMapper will NOT create an auto-incrementing :id key for you +automatically, so you'll need to either explicitly create one with + + property :id, Serial + +You can choose to use a natural key by doing + + property :slug, String, :key => true + +Remember, DataMapper supports multiple keys ("composite keys"), so if your +model has two or more keys, no big deal + + property :store_id, Integer, :key => true + property :invoice_id, Integer, :key => true + +=== How do I make a model paranoid? + +Create a property and make it a ParanoidDateTime or ParanoidBoolean type. + + property :deleted_at, ParanoidDateTime + property :deleted, ParanoidBoolean + +All of your calls to ##all(), ##first() will be scoped +with :deleted_at => nil or :deleted => false. Plus, +you won't see deleted objects in your associations. + +=== Does DataMapper do Single Table Inheritance? 
+ +This is what the Discriminator data-type is for: + + class Person + include DataMapper::Resource + property :id, Serial + property :type, Discriminator ## other shared properties here + end + + class Salesperson < Person; end + +You can claim a column to have the type Discriminator and DataMapper will +automatically drop the class name of the inherited classes into that field of +the data-store. + +=== How do I run my own commands? + + repository.adapter.query("select * from users where clue > 0") + repository(:integration).adapter.query("select * from users where clue > 0") + +This does not return any Users (har har), but rather Struct's that will quack +like Users. They'll be read-only as well. + +repository.adapter.query shouldn't be used if you aren't expecting a result set +back. If you want to just execute something against the database, use +repository.adapter.execute instead. + + +=== Can I get an query log of what DataMapper is issuing? + +An example of how to modify an existing logger: + + DataMapper.logger.set_log(STDOUT, :debug) + +An example of how to create new logger: + + DataMapper::Logger.new(STDOUT, :info) + +To send a message to the DataMapper logger: + + DataMapper.logger.debug("something") + DataMapper.logger.info ("something") + DataMapper.logger.warn ("something") + DataMapper.logger.error("something") + DataMapper.logger.fatal("something") + + +=== I want to run the specs, but I have a custom database setup + +For example, if you installed MySQL using MacPorts, your socket may be located +at /opt/local/var/run/mysql5/mysqld.sock instead of /tmp/mysql.sock + +In that case, setup an environment variable in your shell before running the +specs: + export MYSQL_SPEC_URI="mysql://localhost/dm_core_test?socket=/opt/local/var/run/mysql5/mysqld.sock" + rake spec + +Using another kind of database? Note that spec_helper.rb will also look for +SQLITE3_SPEC_URI AND POSTGRES_SPEC_URI. 
diff --git a/vendor/dm-core-0.9.6/History.txt b/vendor/dm-core-0.9.6/History.txt new file mode 100644 index 0000000..35fd605 --- /dev/null +++ b/vendor/dm-core-0.9.6/History.txt @@ -0,0 +1,145 @@ + +== 0.3.0 +* HasManyAssociation::Set now has a nil? method, so we can do stuff like cage.animal.nil? + +== 0.2.5 +* has_one bugfixes +* Added syntax for setting CHECK-constraints directly in your properties (Postgres) +* You can now set indexes with :index => true and :index => :unique +* Support for composite indexes (thanks to Jeffrey Gelens) +* Add composite scope to validates_uniqueness +* Added private/protected properties +* Remove HasOneAssociation, Make HasManyAssociation impersonate has_one relationships +* Added #get method +* Persistence module added, inheriting from DataMapper::Base no longer necessary + +== 0.2.4 +* Bug fixes +* Added paranoia + +== 0.2.3 +* Added String#t for translation and overrides for default validation messages +* Give credit where it's due: zapnap, not pimpmaster, submitted the String#blank? patch. My bad. :-( +* MAJOR: Resolve issue with non-unique-hash values and #dirty?; now frozen original values are stored instead +* Added Base#update_attributes +* MAJOR: Queries are now passed to the database drivers in a parameterized fashion +* Updated PostgreSQL driver and adapter to current + +== 0.2.2 +* Removed C extension bundles and log files from package + +== 0.2.1 +* Added :float column support +* Added association proxies: ie: Zoo.first.exhibits.animals +* Columns stored in SortedSet +* Swig files are no longer RDOCed +* Added :date column support +* BUG: Fixed UTC issues with datetimes +* Added #to_yaml method +* Added #to_xml method +* Added #to_json method +* BUG: Fixed HasManyAssociation::Set#inspect +* BUG: Fixed #reload! 
+* BUG: Column copy for STI moved into Table#initialize to better handle STI with multiple mapped databases +* BUG: before_create callbacks moved in the execution flow since they weren't guaranteed to fire before +* Threading enhancements: Removed single_threaded_mode, #database block form adjusted for thread-safety +* BUG: Fixed String#blank? when a multi-line string contained a blank line (thanks zapnap!) +* Performance enhancements: (thanks wycats!) + +== 0.2.0 +* AdvancedHasManyAssociation now functional for fetches +* AdvancedHasManyAssociation renamed to HasNAssociation +* HasManyAssociation refactored to use HasNAssociation superclass +* Slight spec tweaks to accomodate the updates +* HasOneAssociation refactored to use HasNAssociation superclass +* Added HasAndBelongsToManyAssociation, using HasNAssociation as a basis; Need to add corresponding SQL generation code in AdvancedLoadCommand +* Added spec for habtm query generation +* HasNAssociation#foreign_key returns a DataMapper::Adapters::Sql::Mappings::Column instance instead of a raw String now +* Added table, association, association_table and to_sql methods to HasNAssociation +* Added associations_spec.rb +* Added a forced table-recreation to spec_helper.rb so the tests could run with a clean version of the database, including any new columns added to the models +* Added HasAndBelongsToManyAssociation#to_sql (all current specs pass now!) 
+* Minor tweaks to Callbacks +* Added CallbacksHelper to declare class-method ::callbacks on DataMapper::Base +* Implemented before_validate and after_validate hooks in ValidationHelper +* Minor documentation additions in callbacks.rb +* Added callbacks_spec +* Moved class-method declarations for built-in callbacks to the callbacks helper instead of DataMapper::Base +* Renamed :before/after_validate callback to :before/after_validation to match ActiveRecord +* Callbacks#add now accepts a Symbol which maps a callback to a method call on the targetted instance, also added a spec to verify this behavior +* Documented callbacks.rb +* Added DataMapper::Associations::Reference class +* Documented DataMapper::Associations::Reference class +* Upgraded BelongsToAssociation to new style +* Added AssociationsSet to handle simple "last-in" for association bindings +* Fixed extra spec loading +* Added *Association#columns +* Some refactoring in AdvancedLoadCommand regarding :include options +* Added support for class-less Mappings::Table instances, with just a string name +* HasAndBelongsToManyAssociation#join_table #left_foreign_key and #right_foreign_key reference actual Table or Column objects now +* Added :shallow_include option for HABTM joins in AdvancedLoadCommand and corresponding spec +* Added Commands::AdvancedConditions +* Added ORDER, LIMIT, OFFSET and WHERE support to AdvancedLoadCommand +* Renamed spec/has_many.rb to spec/has_many_spec.rb +* Tweaked the loading of has_many relationships; big performance boost; got rid of an extra query +* Added EmbeddedValue support, and accompanying spec +* Fleshed out AdvancedConditions a bit; added conditions_spec.rb +* Added more AdvancedConditions specs +* Added Loader to handle multi-instanced rows +* AdvancedLoadCommand replaced LoadCommand; down to 3 failing specs +* All specs pass +* Added :intercept_load finder option and accompanying spec +* Modified :intercept_load block signature to |instance,columns,row| +* 
HasAndBelongsToMany works, all specs pass +* Fixed a couple bugs with keys; Added DataMapper::Base#key= method +* Made DataMapper::Base#lazy_load! a little more flexible +* Removed LoadCommand overwrites from MysqlAdapter +* Default Database#single_threaded mode is true now +* Removed MysqlAdapter#initialize, which only served to setup the connections, moved to SqlAdapter +* Added SqlAdapter#create_connection and SqlAdapter#close_connection abstract methods +* Added MysqlAdapter#create_connection and MysqlAdapter#close_connection concrete methods +* Made SqlAdapter#connection a concrete method (instead of abstract), with support for single_threaded operation +* Database#setup now takes a Hash of options instead of a block-initializer +* Validation chaining should work for all association types +* Save chaining should work for has_many associations +* Added benchmarks for in-session performance to performance.rb +* Removed block conditions; They're slower and don't offer any real advantages +* Removed DeleteCommand +* Removed SaveCommand +* Removed TableExistsCommand +* Session renamed to Context +* Most command implementations moved to methods in SqlAdapter +* Removed UnitOfWork module, instead moving a slightly refactored implementation into Base + +== 0.1.1 +* Removed /lib/data_mapper/extensions +* Moved ActiveRecordImpersonation into DataMapper::Support module +* Moved CallbackHelper methods into DataMapper::Base class +* Moved ValidationHelper into DataMapper::Validations module +* Removed LoadedSet since it's not necessary for it to reference the Database, so it's nothing more than an array now; Replaced with Array +* Modified data_mapper.rb to load DataMapper::Support::Enumerable +* Modified example.rb and performance.rb to require 'lib/data_mapper' instead of modifying $LOADPATH +* Created SqlAdapter base-class +* Refactored MysqlAdapter to use SqlAdapter superclass +* Refactored Sqlite3Adapter to use SqlAdapter superclass +* Moved /lib/data_mapper/queries 
to /lib/data_mapper/adapters/sql/queries +* Moved Connection, Result and Reader classes along with Coersion and Quoting modules to DataMapper::Adapters::Sql module +* Moved DataMapper::Adapters::Sql::Queries to ::Commands +* Moved Mappings to SqlAdapter +* Added monolithic DeleteCommand +* Added monolithic SaveCommand +* Added TableExistsCommand +* Moved save/delete logic out of Session +* Added create-table functionality to SaveCommand +* Cleaned up Session; #find no longer supported, use #all or #first +* Moved object materialization into LoadCommand +* Migrated Sqlite3Adapter::Commands +* Added Session#query support back in +* Removed Connection/Reader/Result classes +* Set DataMapper::Base#key on load to avoid double-hit against Schema +* Added DataMapper::Support::Struct for increased Session#query performance +* Added AdvancedHasManyAssociation (preview status) +* Added benchmarks comparing ActiveRecord::Base::find_by_sql with Session#query + +== 0.1.0 +* Initial Public Release diff --git a/vendor/dm-core-0.9.6/MIT-LICENSE b/vendor/dm-core-0.9.6/MIT-LICENSE new file mode 100644 index 0000000..40e341e --- /dev/null +++ b/vendor/dm-core-0.9.6/MIT-LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2007 Sam Smoot + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/dm-core-0.9.6/Manifest.txt b/vendor/dm-core-0.9.6/Manifest.txt new file mode 100644 index 0000000..d8ffd64 --- /dev/null +++ b/vendor/dm-core-0.9.6/Manifest.txt @@ -0,0 +1,128 @@ +.autotest +CONTRIBUTING +FAQ +History.txt +MIT-LICENSE +Manifest.txt +QUICKLINKS +README.txt +Rakefile +SPECS +TODO +lib/dm-core.rb +lib/dm-core/adapters.rb +lib/dm-core/adapters/abstract_adapter.rb +lib/dm-core/adapters/data_objects_adapter.rb +lib/dm-core/adapters/in_memory_adapter.rb +lib/dm-core/adapters/mysql_adapter.rb +lib/dm-core/adapters/postgres_adapter.rb +lib/dm-core/adapters/sqlite3_adapter.rb +lib/dm-core/associations.rb +lib/dm-core/associations/many_to_many.rb +lib/dm-core/associations/many_to_one.rb +lib/dm-core/associations/one_to_many.rb +lib/dm-core/associations/one_to_one.rb +lib/dm-core/associations/relationship.rb +lib/dm-core/associations/relationship_chain.rb +lib/dm-core/auto_migrations.rb +lib/dm-core/collection.rb +lib/dm-core/dependency_queue.rb +lib/dm-core/hook.rb +lib/dm-core/identity_map.rb +lib/dm-core/is.rb +lib/dm-core/logger.rb +lib/dm-core/migrations/destructive_migrations.rb +lib/dm-core/migrator.rb +lib/dm-core/model.rb +lib/dm-core/naming_conventions.rb +lib/dm-core/property.rb +lib/dm-core/property_set.rb +lib/dm-core/query.rb +lib/dm-core/repository.rb +lib/dm-core/resource.rb +lib/dm-core/scope.rb +lib/dm-core/support.rb +lib/dm-core/support/array.rb +lib/dm-core/support/assertions.rb +lib/dm-core/support/errors.rb +lib/dm-core/support/kernel.rb 
+lib/dm-core/support/symbol.rb +lib/dm-core/transaction.rb +lib/dm-core/type.rb +lib/dm-core/type_map.rb +lib/dm-core/types.rb +lib/dm-core/types/boolean.rb +lib/dm-core/types/discriminator.rb +lib/dm-core/types/object.rb +lib/dm-core/types/paranoid_boolean.rb +lib/dm-core/types/paranoid_datetime.rb +lib/dm-core/types/serial.rb +lib/dm-core/types/text.rb +lib/dm-core/version.rb +script/all +script/performance.rb +script/profile.rb +spec/integration/association_spec.rb +spec/integration/association_through_spec.rb +spec/integration/associations/many_to_many_spec.rb +spec/integration/associations/many_to_one_spec.rb +spec/integration/associations/one_to_many_spec.rb +spec/integration/auto_migrations_spec.rb +spec/integration/collection_spec.rb +spec/integration/data_objects_adapter_spec.rb +spec/integration/dependency_queue_spec.rb +spec/integration/model_spec.rb +spec/integration/mysql_adapter_spec.rb +spec/integration/postgres_adapter_spec.rb +spec/integration/property_spec.rb +spec/integration/query_spec.rb +spec/integration/repository_spec.rb +spec/integration/resource_spec.rb +spec/integration/sqlite3_adapter_spec.rb +spec/integration/sti_spec.rb +spec/integration/strategic_eager_loading_spec.rb +spec/integration/transaction_spec.rb +spec/integration/type_spec.rb +spec/lib/logging_helper.rb +spec/lib/mock_adapter.rb +spec/lib/model_loader.rb +spec/lib/publicize_methods.rb +spec/models/content.rb +spec/models/vehicles.rb +spec/models/zoo.rb +spec/spec.opts +spec/spec_helper.rb +spec/unit/adapters/abstract_adapter_spec.rb +spec/unit/adapters/adapter_shared_spec.rb +spec/unit/adapters/data_objects_adapter_spec.rb +spec/unit/adapters/in_memory_adapter_spec.rb +spec/unit/adapters/postgres_adapter_spec.rb +spec/unit/associations/many_to_many_spec.rb +spec/unit/associations/many_to_one_spec.rb +spec/unit/associations/one_to_many_spec.rb +spec/unit/associations/one_to_one_spec.rb +spec/unit/associations/relationship_spec.rb +spec/unit/associations_spec.rb 
+spec/unit/auto_migrations_spec.rb +spec/unit/collection_spec.rb +spec/unit/data_mapper_spec.rb +spec/unit/identity_map_spec.rb +spec/unit/is_spec.rb +spec/unit/migrator_spec.rb +spec/unit/model_spec.rb +spec/unit/naming_conventions_spec.rb +spec/unit/property_set_spec.rb +spec/unit/property_spec.rb +spec/unit/query_spec.rb +spec/unit/repository_spec.rb +spec/unit/resource_spec.rb +spec/unit/scope_spec.rb +spec/unit/transaction_spec.rb +spec/unit/type_map_spec.rb +spec/unit/type_spec.rb +tasks/ci.rb +tasks/dm.rb +tasks/doc.rb +tasks/gemspec.rb +tasks/hoe.rb +tasks/install.rb diff --git a/vendor/dm-core-0.9.6/QUICKLINKS b/vendor/dm-core-0.9.6/QUICKLINKS new file mode 100644 index 0000000..578a932 --- /dev/null +++ b/vendor/dm-core-0.9.6/QUICKLINKS @@ -0,0 +1,12 @@ += Quick Links + +* Setup and Configuration - DataMapper +* Finders and CRUD - +* Properties - DataMapper::Property +* FAQ[link:/files/FAQ.html] +* Contact Us + * Website - http://www.datamapper.org + * Bug Reports - http://wm.lighthouseapp.com/projects/4819-datamapper/overview + * IRC Channel - ##datamapper on irc.freenode.net + * Mailing List - http://groups.google.com/group/datamapper/ + diff --git a/vendor/dm-core-0.9.6/README.txt b/vendor/dm-core-0.9.6/README.txt new file mode 100644 index 0000000..db9c255 --- /dev/null +++ b/vendor/dm-core-0.9.6/README.txt @@ -0,0 +1,143 @@ + +:include:QUICKLINKS + += Why DataMapper? + +== Open Development + +DataMapper sports a very accessible code-base and a welcoming community. +Outside contributions and feedback are welcome and encouraged, especially +constructive criticism. Make your voice heard! Submit a +ticket[http://wm.lighthouseapp.com/projects/4819-datamapper/overview] or +patch[http://wm.lighthouseapp.com/projects/4819-datamapper/overview], speak up +on our mailing-list[http://groups.google.com/group/datamapper/], chat with us +on irc[irc://irc.freenode.net/#datamapper], write a spec, get it reviewed, ask +for commit rights. 
It's as easy as that to become a contributor. + +== Identity Map + +One row in the data-store should equal one object reference. Pretty simple idea. +Pretty profound impact. If you run the following code in ActiveRecord you'll +see all false results. Do the same in DataMapper and it's +true all the way down. + + @parent = Tree.find(:first, :conditions => ['name = ?', 'bob']) + + @parent.children.each do |child| + puts @parent.object_id == child.parent.object_id + end + +This makes DataMapper faster and allocate less resources to get things done. + +== Dirty Tracking + +When you save a model back to your data-store, DataMapper will only write +the fields that actually changed. So it plays well with others. You can +use it in an Integration data-store without worrying that your application will +be a bad actor causing trouble for all of your other processes. + +You can also configure which strategy you'd like to use to track dirtiness. + +== Eager Loading + +Ready for something amazing? The following example executes only two queries. + + zoos = Zoo.all + first = zoos.first + first.exhibits # Loads the exhibits for all the Zoo objects in the zoos variable. + +Pretty impressive huh? The idea is that you aren't going to load a set of +objects and use only an association in just one of them. This should hold up +pretty well against a 99% rule. When you don't want it to work like this, just +load the item you want in it's own set. So the DataMapper thinks ahead. We +like to call it "performant by default". This feature single-handedly wipes +out the "N+1 Query Problem". No need to specify an include option in +your finders. + +== Laziness Can Be A Virtue + +Text fields are expensive in data-stores. They're generally stored in a +different place than the rest of your data. So instead of a fast sequential +read from your hard-drive, your data-store server has to hop around all over the +place to get what it needs. 
Since ActiveRecord returns everything by default, +adding a text field to a table slows everything down drastically, across the +board. + +Not so with the DataMapper. Text fields are treated like in-row associations +by default, meaning they only load when you need them. If you want more +control you can enable or disable this feature for any field (not just +text-fields) by passing a @lazy@ option to your field mapping with a value of +true or false. + + class Animal + include DataMapper::Resource + property :name, String + property :notes, Text, :lazy => false + end + +Plus, lazy-loading of text fields happens automatically and intelligently when +working with associations. The following only issues 2 queries to load up all +of the notes fields on each animal: + + animals = Animal.all + animals.each do |pet| + pet.notes + end + +== Plays Well With Others + +In ActiveRecord, all your fields are mapped, whether you want them or not. +This slows things down. In the DataMapper you define your mappings in your +model. So instead of an _ALTER TABLE ADD field_ in your data-store, you simply +add a property :name, :string to your model. DRY. No schema.rb. No +migration files to conflict or die without reverting changes. Your model +drives the data-store, not the other way around. + +Unless of course you want to map to a legacy data-store. Raise your hand if you +like seeing a method called col2Name on your model just because +that's what it's called in an old data-store you can't afford to change right +now? In DataMapper you control the mappings: + + class Fruit + include DataMapper::Resource + storage_names[:repo] = 'frt' + property :name, String, :field => 'col2Name' + end + +== All Ruby, All The Time + +It's great that ActiveRecord allows you to write SQL when you need to, but +should we have to so often? 
+ +DataMapper supports issuing your own query, but it also provides more helpers +and a unique hash-based condition syntax to cover more of the use-cases where +issuing your own SQL would have been the only way to go. For example, any +finder option that's non-standard is considered a condition. So you can write +Zoo.all(:name => 'Dallas') and DataMapper will look for zoos with the +name of 'Dallas'. + +It's just a little thing, but it's so much nicer than writing +Zoo.find(:all, :conditions => ['name = ?', 'Dallas']). What if you +need other comparisons though? Try these: + + Zoo.first(:name => 'Galveston') + + # 'gt' means greater-than. We also do 'lt'. + Person.all(:age.gt => 30) + + # 'gte' means greather-than-or-equal-to. We also do 'lte'. + Person.all(:age.gte => 30) + + Person.all(:name.not => 'bob') + + # If the value of a pair is an Array, we do an IN-clause for you. + Person.all(:name.like => 'S%', :id => [1, 2, 3, 4, 5]) + + # An alias for Zoo.find(11) + Zoo[11] + + # Does a NOT IN () clause for you. + Person.all(:name.not => ['bob','rick','steve']) + +See? Fewer SQL fragments dirtying your Ruby code. And that's just a few of the +nice syntax tweaks DataMapper delivers out of the box... diff --git a/vendor/dm-core-0.9.6/Rakefile b/vendor/dm-core-0.9.6/Rakefile new file mode 100755 index 0000000..66aedaa --- /dev/null +++ b/vendor/dm-core-0.9.6/Rakefile @@ -0,0 +1,30 @@ +#!/usr/bin/env ruby +require 'pathname' +require 'rubygems' +require 'rake' +require 'rake/rdoctask' +require 'spec/rake/spectask' + +require 'lib/dm-core/version' + +ROOT = Pathname(__FILE__).dirname.expand_path + +AUTHOR = "Sam Smoot" +EMAIL = "ssmoot@gmail.com" +GEM_NAME = "dm-core" +GEM_VERSION = DataMapper::VERSION +GEM_DEPENDENCIES = ["data_objects", ">=0.9.5"], ["extlib", ">=0.9.5"], + ["rspec", ">=1.1.3"], ["addressable", ">=1.0.4"] + + +PROJECT_NAME = "datamapper" +PROJECT_DESCRIPTION = "Faster, Better, Simpler." 
+PROJECT_SUMMARY = "An Object/Relational Mapper for Ruby" +PROJECT_URL = "http://datamapper.org" + +require ROOT + 'tasks/hoe' +require ROOT + 'tasks/gemspec' +require ROOT + 'tasks/install' +require ROOT + 'tasks/dm' +require ROOT + 'tasks/doc' +require ROOT + 'tasks/ci' diff --git a/vendor/dm-core-0.9.6/SPECS b/vendor/dm-core-0.9.6/SPECS new file mode 100644 index 0000000..a130b00 --- /dev/null +++ b/vendor/dm-core-0.9.6/SPECS @@ -0,0 +1,63 @@ +Reading Specs +============= + + Blah blah blah... + +Writing Specs +============= + + Here are some general dos and don'ts + + = DO: + + * Write more specs for error conditions than clean conditions. + * Write specs with readability in mind. Somebody knew to DataMapper should be + able to read specs to learn how something works. + * Use existing models that are part of a metaphor. + * Nest describe blocks (2 or 3 levels deep is probably fine). + * Limit a describe block to 10 - 15 examples. + * Group specs by method being tested. (See the 'Ordering Specs' section) + * Use custom matchers. + + = DON'T: + + * Spec more than one unit of functionality in an example. An example should be + as short as possible (while still remaining readable). + * Spec implementation. Refactoring code should not break specs. + * Declare models in the spec file. + + And a final do: Do go against the guidelines if your best judgement tells you + to. These are just guidelines and are obviously not fast rules. + +Models +====== + + Models are declared in separate files as opposed to individual spec files for + two reasons. The first is to improve readability. By creating as few models + as possible and sharing these models throughout the specs, a reader can + become familiar with the models being used quicker. Models also follow a + few simple metaphors, such as a zoo, a blog implementation, etc... Following + metaphors makes it easier for a reader to guess what is going on with respect + to the models. 
+ + The second reason is to allow the spec environment to be as pristine as + possible going into an example. Models being loaded from the model directory + are tracked and reloaded before each example. Any changes that might be made + to the model are reset at the end. + +Mocks and Stubs +=============== + + Obviously, mocks and stubs are a powerful feature when it comes to BDD; + however, remember that you are writing specs for behavior and NOT + implementation. + +Ordering Specs +============== + + Specs aren't much use if nobody can find where anything is, so keeping specs + well organized is critical. Currently, we are trying out the following + structure: + + * List guidelines here... + \ No newline at end of file diff --git a/vendor/dm-core-0.9.6/TODO b/vendor/dm-core-0.9.6/TODO new file mode 100644 index 0000000..575c7d0 --- /dev/null +++ b/vendor/dm-core-0.9.6/TODO @@ -0,0 +1 @@ +See: http://github.com/sam/dm-core/wikis diff --git a/vendor/dm-core-0.9.6/lib/dm-core.rb b/vendor/dm-core-0.9.6/lib/dm-core.rb new file mode 100644 index 0000000..c771c72 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core.rb @@ -0,0 +1,217 @@ +# This file begins the loading sequence. +# +# Quick Overview: +# * Requires fastthread, support libs, and base. +# * Sets the application root and environment for compatibility with frameworks +# such as Rails or Merb. +# * Checks for the database.yml and loads it if it exists. +# * Sets up the database using the config from the Yaml file or from the +# environment. 
+# + +require 'date' +require 'pathname' +require 'set' +require 'time' +require 'yaml' + +require 'rubygems' + +gem 'addressable', '>=1.0.4' +require 'addressable/uri' + +gem 'extlib', '>=0.9.5' +require 'extlib' +require "extlib/inflection" + +begin + require 'fastthread' +rescue LoadError + # fastthread not installed +end + +dir = Pathname(__FILE__).dirname.expand_path / 'dm-core' + +require dir / 'support' +require dir / 'resource' +require dir / 'model' + +require dir / 'dependency_queue' +require dir / 'type' +require dir / 'type_map' +require dir / 'types' +require dir / 'hook' +require dir / 'associations' +require dir / 'auto_migrations' +require dir / 'identity_map' +require dir / 'logger' +require dir / 'migrator' +require dir / 'naming_conventions' +require dir / 'property_set' +require dir / 'query' +require dir / 'transaction' +require dir / 'repository' +require dir / 'scope' +require dir / 'property' +require dir / 'adapters' +require dir / 'collection' +require dir / 'is' + +# == Setup and Configuration +# DataMapper uses URIs or a connection hash to connect to your data-store. +# URI connections takes the form of: +# DataMapper.setup(:default, 'protocol://username:password@localhost:port/path/to/repo') +# +# Breaking this down, the first argument is the name you wish to give this +# connection. If you do not specify one, it will be assigned :default. If you +# would like to connect to more than one data-store, simply issue this command +# again, but with a different name specified. +# +# In order to issue ORM commands without specifying the repository context, you +# must define the :default database. Otherwise, you'll need to wrap your ORM +# calls in repository(:name) { }. +# +# Second, the URI breaks down into the access protocol, the username, the +# server, the password, and whatever path information is needed to properly +# address the data-store on the server. 
+# +# Here's some examples +# DataMapper.setup(:default, "sqlite3://path/to/your/project/db/development.db") +# DataMapper.setup(:default, "mysql://localhost/dm_core_test") +# # no auth-info +# DataMapper.setup(:default, "postgres://root:supahsekret@127.0.0.1/dm_core_test") +# # with auth-info +# +# +# Alternatively, you can supply a hash as the second parameter, which would +# take the form: +# +# DataMapper.setup(:default, { +# :adapter => 'adapter_name_here', +# :database => "path/to/repo", +# :username => 'username', +# :password => 'password', +# :host => 'hostname' +# }) +# +# === Logging +# To turn on error logging to STDOUT, issue: +# +# DataMapper::Logger.new(STDOUT, 0) +# +# You can pass a file location ("/path/to/log/file.log") in place of STDOUT. +# see DataMapper::Logger for more information. +# +module DataMapper + extend Assertions + + def self.root + @root ||= Pathname(__FILE__).dirname.parent.expand_path + end + + ## + # Setups up a connection to a data-store + # + # @param Symbol name a name for the context, defaults to :default + # @param [Hash{Symbol => String}, Addressable::URI, String] uri_or_options + # connection information + # + # @return Repository the resulting setup repository + # + # @raise ArgumentError "+name+ must be a Symbol, but was..." indicates that + # an invalid argument was passed for name[Symbol] + # @raise [ArgumentError] "+uri_or_options+ must be a Hash, URI or String, + # but was..." 
indicates that connection information could not be gleaned + # from the given uri_or_options + # + # - + # @api public + def self.setup(name, uri_or_options) + assert_kind_of 'name', name, Symbol + assert_kind_of 'uri_or_options', uri_or_options, Addressable::URI, Hash, String + + case uri_or_options + when Hash + adapter_name = uri_or_options[:adapter].to_s + when String, DataObjects::URI, Addressable::URI + uri_or_options = DataObjects::URI.parse(uri_or_options) if uri_or_options.kind_of?(String) + adapter_name = uri_or_options.scheme + end + + class_name = Extlib::Inflection.classify(adapter_name) + 'Adapter' + + unless Adapters::const_defined?(class_name) + lib_name = "#{Extlib::Inflection.underscore(adapter_name)}_adapter" + begin + require root / 'lib' / 'dm-core' / 'adapters' / lib_name + rescue LoadError => e + begin + require lib_name + rescue Exception + # library not found, raise the original error + raise e + end + end + end + + Repository.adapters[name] = Adapters::const_get(class_name).new(name, uri_or_options) + end + + ## + # Block Syntax + # Pushes the named repository onto the context-stack, + # yields a new session, and pops the context-stack. + # + # Non-Block Syntax + # Returns the current session, or if there is none, + # a new Session. + # + # @param [Symbol] args the name of a repository to act within or return, :default is default + # @yield [Proc] (optional) block to execute within the context of the named repository + # @demo spec/integration/repository_spec.rb + def self.repository(name = nil) # :yields: current_context + current_repository = if name + raise ArgumentError, "First optional argument must be a Symbol, but was #{args.first.inspect}" unless name.is_a?(Symbol) + Repository.context.detect { |r| r.name == name } || Repository.new(name) + else + Repository.context.last || Repository.new(Repository.default_name) + end + + if block_given? 
+ current_repository.scope { |*block_args| yield(*block_args) } + else + current_repository + end + end + + # A logger should always be present. Lets be consistent with DO + Logger.new(nil, :off) + + ## + # destructively migrates the repository upwards to match model definitions + # + # @param [Symbol] name repository to act on, :default is the default + def self.migrate!(name = Repository.default_name) + repository(name).migrate! + end + + ## + # drops and recreates the repository upwards to match model definitions + # + # @param [Symbol] name repository to act on, :default is the default + def self.auto_migrate!(repository_name = nil) + AutoMigrator.auto_migrate(repository_name) + end + + def self.auto_upgrade!(repository_name = nil) + AutoMigrator.auto_upgrade(repository_name) + end + + def self.prepare(*args, &blk) + yield repository(*args) + end + + def self.dependency_queue + @dependency_queue ||= DependencyQueue.new + end +end diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters.rb new file mode 100644 index 0000000..8858549 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters.rb @@ -0,0 +1,22 @@ +dir = Pathname(__FILE__).dirname.expand_path / 'adapters' + +require dir / 'abstract_adapter' +require dir / 'in_memory_adapter' + +# TODO Factor these out into dm-more +require dir / 'data_objects_adapter' +begin + require dir / 'sqlite3_adapter' +rescue LoadError + # ignore it +end +begin + require dir / 'mysql_adapter' +rescue LoadError + # ignore it +end +begin + require dir / 'postgres_adapter' +rescue LoadError + # ignore it +end diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/abstract_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/abstract_adapter.rb new file mode 100644 index 0000000..28f4c1b --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/abstract_adapter.rb @@ -0,0 +1,209 @@ +module DataMapper + module Adapters + class AbstractAdapter + include Assertions + + 
attr_reader :name, :uri + attr_accessor :resource_naming_convention, :field_naming_convention + + def create(resources) + raise NotImplementedError + end + + def read_many(query) + raise NotImplementedError + end + + def read_one(query) + raise NotImplementedError + end + + def update(attributes, query) + raise NotImplementedError + end + + def delete(query) + raise NotImplementedError + end + + protected + + def normalize_uri(uri_or_options) + uri_or_options + end + + private + + # Instantiate an Adapter by passing it a DataMapper::Repository + # connection string for configuration. + def initialize(name, uri_or_options) + assert_kind_of 'name', name, Symbol + assert_kind_of 'uri_or_options', uri_or_options, Addressable::URI, DataObjects::URI, Hash, String + + @name = name + @uri = normalize_uri(uri_or_options) + + @resource_naming_convention = NamingConventions::Resource::UnderscoredAndPluralized + @field_naming_convention = NamingConventions::Field::Underscored + + @transactions = {} + end + + # TODO: move to dm-more/dm-migrations + module Migration + # + # Returns whether the storage_name exists. + # + # @param storage_name a String defining the name of a storage, + # for example a table name. + # + # @return true if the storage exists + # + # TODO: move to dm-more/dm-migrations (if possible) + def storage_exists?(storage_name) + raise NotImplementedError + end + + # + # Returns whether the field exists. + # + # @param storage_name a String defining the name of a storage, for example a table name. + # @param field_name a String defining the name of a field, for example a column name. + # + # @return true if the field exists. 
+ # + # TODO: move to dm-more/dm-migrations (if possible) + def field_exists?(storage_name, field_name) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def upgrade_model_storage(repository, model) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def create_model_storage(repository, model) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def destroy_model_storage(repository, model) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def alter_model_storage(repository, *args) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def create_property_storage(repository, property) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def destroy_property_storage(repository, property) + raise NotImplementedError + end + + # TODO: move to dm-more/dm-migrations + def alter_property_storage(repository, *args) + raise NotImplementedError + end + + module ClassMethods + # Default TypeMap for all adapters. + # + # @return default TypeMap + # + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new + end + end + end + + include Migration + extend Migration::ClassMethods + + # TODO: move to dm-more/dm-transaction + module Transaction + # + # Pushes the given Transaction onto the per thread Transaction stack so + # that everything done by this Adapter is done within the context of said + # Transaction. + # + # @param transaction a Transaction to be the + # 'current' transaction until popped. + # + # TODO: move to dm-more/dm-transaction + def push_transaction(transaction) + transactions(Thread.current) << transaction + end + + # + # Pop the 'current' Transaction from the per thread Transaction stack so + # that everything done by this Adapter is no longer necessarily within the + # context of said Transaction. + # + # @return the former 'current' transaction. 
+ # + # TODO: move to dm-more/dm-transaction + def pop_transaction + transactions(Thread.current).pop + end + + # + # Retrieve the current transaction for this Adapter. + # + # Everything done by this Adapter is done within the context of this + # Transaction. + # + # @return the 'current' transaction for this Adapter. + # + # TODO: move to dm-more/dm-transaction + def current_transaction + transactions(Thread.current).last + end + + # + # Returns whether we are within a Transaction. + # + # @return whether we are within a Transaction. + # + # TODO: move to dm-more/dm-transaction + def within_transaction? + !current_transaction.nil? + end + + # + # Produces a fresh transaction primitive for this Adapter + # + # Used by DataMapper::Transaction to perform its various tasks. + # + # @return a new Object that responds to :close, :begin, :commit, + # :rollback, :rollback_prepared and :prepare + # + # TODO: move to dm-more/dm-transaction (if possible) + def transaction_primitive + raise NotImplementedError + end + + private + def transactions(thread) + unless @transactions[thread] + @transactions.delete_if do |key, value| + !key.respond_to?(:alive?) || !key.alive? + end + @transactions[thread] = [] + end + @transactions[thread] + end + + end + + include Transaction + end # class AbstractAdapter + end # module Adapters +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/data_objects_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/data_objects_adapter.rb new file mode 100644 index 0000000..faff385 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/data_objects_adapter.rb @@ -0,0 +1,707 @@ +gem 'data_objects', '>=0.9.5' +require 'data_objects' + +module DataMapper + module Adapters + # You must inherit from the DoAdapter, and implement the + # required methods to adapt a database library for use with the DataMapper. 
+ # + # NOTE: By inheriting from DataObjectsAdapter, you get a copy of all the + # standard sub-modules (Quoting, Coersion and Queries) in your own Adapter. + # You can extend and overwrite these copies without affecting the originals. + class DataObjectsAdapter < AbstractAdapter + def create(resources) + created = 0 + resources.each do |resource| + repository = resource.repository + model = resource.model + attributes = resource.dirty_attributes + + # TODO: make a model.identity_field method + identity_field = model.key(repository.name).detect { |p| p.serial? } + + statement = create_statement(repository, model, attributes.keys, identity_field) + bind_values = attributes.values + + result = execute(statement, *bind_values) + + if result.to_i == 1 + if identity_field + identity_field.set!(resource, result.insert_id) + end + created += 1 + end + end + created + end + + def read_many(query) + Collection.new(query) do |collection| + with_connection do |connection| + command = connection.create_command(read_statement(query)) + command.set_types(query.fields.map { |p| p.primitive }) + + begin + bind_values = query.bind_values.map do |v| + v == [] ? [nil] : v + end + reader = command.execute_reader(*bind_values) + + while(reader.next!) + collection.load(reader.values) + end + ensure + reader.close if reader + end + end + end + end + + def read_one(query) + with_connection do |connection| + command = connection.create_command(read_statement(query)) + command.set_types(query.fields.map { |p| p.primitive }) + + begin + reader = command.execute_reader(*query.bind_values) + + if reader.next! 
+ query.model.load(reader.values, query) + end + ensure + reader.close if reader + end + end + end + + def update(attributes, query) + statement = update_statement(attributes.keys, query) + bind_values = attributes.values + query.bind_values + execute(statement, *bind_values).to_i + end + + def delete(query) + statement = delete_statement(query) + execute(statement, *query.bind_values).to_i + end + + # Database-specific method + def execute(statement, *bind_values) + with_connection do |connection| + command = connection.create_command(statement) + command.execute_non_query(*bind_values) + end + end + + def query(statement, *bind_values) + with_reader(statement, bind_values) do |reader| + results = [] + + if (fields = reader.fields).size > 1 + fields = fields.map { |field| Extlib::Inflection.underscore(field).to_sym } + struct = Struct.new(*fields) + + while(reader.next!) do + results << struct.new(*reader.values) + end + else + while(reader.next!) do + results << reader.values.at(0) + end + end + + results + end + end + + protected + + def normalize_uri(uri_or_options) + if uri_or_options.kind_of?(String) || uri_or_options.kind_of?(Addressable::URI) + uri_or_options = DataObjects::URI.parse(uri_or_options) + end + + if uri_or_options.kind_of?(DataObjects::URI) + return uri_or_options + end + + adapter = uri_or_options.delete(:adapter).to_s + user = uri_or_options.delete(:username) + password = uri_or_options.delete(:password) + host = uri_or_options.delete(:host) + port = uri_or_options.delete(:port) + database = uri_or_options.delete(:database) + query = uri_or_options.to_a.map { |pair| pair * '=' } * '&' + query = nil if query == '' + + return DataObjects::URI.parse(Addressable::URI.new(adapter, user, password, host, port, database, query, nil)) + end + + # TODO: clean up once transaction related methods move to dm-more/dm-transactions + def create_connection + if within_transaction? 
+ current_transaction.primitive_for(self).connection + else + # DataObjects::Connection.new(uri) will give you back the right + # driver based on the Uri#scheme. + DataObjects::Connection.new(@uri) + end + end + + # TODO: clean up once transaction related methods move to dm-more/dm-transactions + def close_connection(connection) + connection.close unless within_transaction? && current_transaction.primitive_for(self).connection == connection + end + + private + + def initialize(name, uri_or_options) + super + + # Default the driver-specifc logger to DataMapper's logger + if driver_module = DataObjects.const_get(@uri.scheme.capitalize) rescue nil + driver_module.logger = DataMapper.logger if driver_module.respond_to?(:logger=) + end + end + + def with_connection + connection = nil + begin + connection = create_connection + return yield(connection) + rescue => e + DataMapper.logger.error(e) + raise e + ensure + close_connection(connection) if connection + end + end + + def with_reader(statement, bind_values = []) + with_connection do |connection| + reader = nil + begin + reader = connection.create_command(statement).execute_reader(*bind_values) + return yield(reader) + ensure + reader.close if reader + end + end + end + + # This model is just for organization. The methods are included into the + # Adapter below. + module SQL + private + + # Adapters requiring a RETURNING syntax for INSERT statements + # should overwrite this to return true. + def supports_returning? + false + end + + # Adapters that do not support the DEFAULT VALUES syntax for + # INSERT statements should overwrite this to return false. + def supports_default_values? + true + end + + def create_statement(repository, model, properties, identity_field) + statement = "INSERT INTO #{quote_table_name(model.storage_name(repository.name))} " + + if supports_default_values? && properties.empty? 
+ statement << 'DEFAULT VALUES' + else + statement << <<-EOS.compress_lines + (#{properties.map { |p| quote_column_name(p.field(repository.name)) } * ', '}) + VALUES + (#{(['?'] * properties.size) * ', '}) + EOS + end + + if supports_returning? && identity_field + statement << " RETURNING #{quote_column_name(identity_field.field(repository.name))}" + end + + statement + end + + def read_statement(query) + statement = "SELECT #{fields_statement(query)}" + statement << " FROM #{quote_table_name(query.model.storage_name(query.repository.name))}" + statement << links_statement(query) if query.links.any? + statement << " WHERE #{conditions_statement(query)}" if query.conditions.any? + statement << " GROUP BY #{group_by_statement(query)}" if query.unique? && query.fields.any? { |p| p.kind_of?(Property) } + statement << " ORDER BY #{order_statement(query)}" if query.order.any? + statement << " LIMIT #{quote_column_value(query.limit)}" if query.limit + statement << " OFFSET #{quote_column_value(query.offset)}" if query.offset && query.offset > 0 + statement + rescue => e + DataMapper.logger.error("QUERY INVALID: #{query.inspect} (#{e})") + raise e + end + + def update_statement(properties, query) + statement = "UPDATE #{quote_table_name(query.model.storage_name(query.repository.name))}" + statement << " SET #{set_statement(query.repository, properties)}" + statement << " WHERE #{conditions_statement(query)}" if query.conditions.any? + statement + end + + def set_statement(repository, properties) + properties.map { |p| "#{quote_column_name(p.field(repository.name))} = ?" } * ', ' + end + + def delete_statement(query) + statement = "DELETE FROM #{quote_table_name(query.model.storage_name(query.repository.name))}" + statement << " WHERE #{conditions_statement(query)}" if query.conditions.any? + statement + end + + def fields_statement(query) + qualify = query.links.any? 
+ query.fields.map { |p| property_to_column_name(query.repository, p, qualify) } * ', ' + end + + def links_statement(query) + table_name = query.model.storage_name(query.repository.name) + + statement = '' + query.links.each do |relationship| + parent_table_name = relationship.parent_model.storage_name(query.repository.name) + child_table_name = relationship.child_model.storage_name(query.repository.name) + + join_table_name = table_name == parent_table_name ? child_table_name : parent_table_name + + # We only do INNER JOIN for now + statement << " INNER JOIN #{quote_table_name(join_table_name)} ON " + + statement << relationship.parent_key.zip(relationship.child_key).map do |parent_property,child_property| + condition_statement(query, :eql, parent_property, child_property) + end * ' AND ' + end + + statement + end + + def conditions_statement(query) + query.conditions.map { |o,p,b| condition_statement(query, o, p, b) } * ' AND ' + end + + def group_by_statement(query) + repository = query.repository + qualify = query.links.any? + query.fields.select { |p| p.kind_of?(Property) }.map { |p| property_to_column_name(repository, p, qualify) } * ', ' + end + + def order_statement(query) + repository = query.repository + qualify = query.links.any? + query.order.map { |i| order_column(repository, i, qualify) } * ', ' + end + + def order_column(repository, item, qualify) + property, descending = nil, false + + case item + when Property + property = item + when Query::Direction + property = item.property + descending = true if item.direction == :desc + end + + order_column = property_to_column_name(repository, property, qualify) + order_column << ' DESC' if descending + order_column + end + + def condition_statement(query, operator, left_condition, right_condition) + return left_condition if operator == :raw + + qualify = query.links.any? 
+ + conditions = [ left_condition, right_condition ].map do |condition| + if condition.kind_of?(Property) || condition.kind_of?(Query::Path) + property_to_column_name(query.repository, condition, qualify) + elsif condition.kind_of?(Query) + opposite = condition == left_condition ? right_condition : left_condition + query.merge_subquery(operator, opposite, condition) + "(#{read_statement(condition)})" + + # [].all? is always true + elsif condition.kind_of?(Array) && condition.any? && condition.all? { |p| p.kind_of?(Property) } + property_values = condition.map { |p| property_to_column_name(query.repository, p, qualify) } + "(#{property_values * ', '})" + else + '?' + end + end + + comparison = case operator + when :eql, :in then equality_operator(right_condition) + when :not then inequality_operator(right_condition) + when :like then 'LIKE' + when :gt then '>' + when :gte then '>=' + when :lt then '<' + when :lte then '<=' + else raise "Invalid query operator: #{operator.inspect}" + end + + "(" + (conditions * " #{comparison} ") + ")" + end + + def equality_operator(operand) + case operand + when Array, Query then 'IN' + when Range then 'BETWEEN' + when NilClass then 'IS' + else '=' + end + end + + def inequality_operator(operand) + case operand + when Array, Query then 'NOT IN' + when Range then 'NOT BETWEEN' + when NilClass then 'IS NOT' + else '<>' + end + end + + def property_to_column_name(repository, property, qualify) + table_name = property.model.storage_name(repository.name) if property && property.respond_to?(:model) + + if table_name && qualify + "#{quote_table_name(table_name)}.#{quote_column_name(property.field(repository.name))}" + else + quote_column_name(property.field(repository.name)) + end + end + + # TODO: once the driver's quoting methods become public, have + # this method delegate to them instead + def quote_table_name(table_name) + table_name.gsub('"', '""').split('.').map { |part| "\"#{part}\"" } * '.' 
+ end + + # TODO: once the driver's quoting methods become public, have + # this method delegate to them instead + def quote_column_name(column_name) + "\"#{column_name.gsub('"', '""')}\"" + end + + # TODO: once the driver's quoting methods become public, have + # this method delegate to them instead + def quote_column_value(column_value) + return 'NULL' if column_value.nil? + + case column_value + when String + if (integer = column_value.to_i).to_s == column_value + quote_column_value(integer) + elsif (float = column_value.to_f).to_s == column_value + quote_column_value(float) + else + "'#{column_value.gsub("'", "''")}'" + end + when DateTime + quote_column_value(column_value.strftime('%Y-%m-%d %H:%M:%S')) + when Date + quote_column_value(column_value.strftime('%Y-%m-%d')) + when Time + quote_column_value(column_value.strftime('%Y-%m-%d %H:%M:%S') + ((column_value.usec > 0 ? ".#{column_value.usec.to_s.rjust(6, '0')}" : ''))) + when Integer, Float + column_value.to_s + when BigDecimal + column_value.to_s('F') + else + column_value.to_s + end + end + end #module SQL + + include SQL + + # TODO: move to dm-more/dm-migrations + module Migration + # TODO: move to dm-more/dm-migrations + def upgrade_model_storage(repository, model) + table_name = model.storage_name(repository.name) + + if success = create_model_storage(repository, model) + return model.properties(repository.name) + end + + properties = [] + + model.properties(repository.name).each do |property| + schema_hash = property_schema_hash(repository, property) + next if field_exists?(table_name, schema_hash[:name]) + statement = alter_table_add_column_statement(table_name, schema_hash) + execute(statement) + properties << property + end + + properties + end + + # TODO: move to dm-more/dm-migrations + def create_model_storage(repository, model) + return false if storage_exists?(model.storage_name(repository.name)) + + execute(create_table_statement(repository, model)) + + (create_index_statements(repository, 
model) + create_unique_index_statements(repository, model)).each do |sql| + execute(sql) + end + + true + end + + # TODO: move to dm-more/dm-migrations + def destroy_model_storage(repository, model) + execute(drop_table_statement(repository, model)) + true + end + + # TODO: move to dm-more/dm-transactions + def transaction_primitive + DataObjects::Transaction.create_for_uri(@uri) + end + + module SQL + private + + # Adapters that support AUTO INCREMENT fields for CREATE TABLE + # statements should overwrite this to return true + # + # TODO: move to dm-more/dm-migrations + def supports_serial? + false + end + + # TODO: move to dm-more/dm-migrations + def alter_table_add_column_statement(table_name, schema_hash) + "ALTER TABLE #{quote_table_name(table_name)} ADD COLUMN #{property_schema_statement(schema_hash)}" + end + + # TODO: move to dm-more/dm-migrations + def create_table_statement(repository, model) + repository_name = repository.name + + statement = <<-EOS.compress_lines + CREATE TABLE #{quote_table_name(model.storage_name(repository_name))} + (#{model.properties_with_subclasses(repository_name).map { |p| property_schema_statement(property_schema_hash(repository, p)) } * ', '} + EOS + + if (key = model.key(repository_name)).any? 
+ statement << ", PRIMARY KEY(#{ key.map { |p| quote_column_name(p.field(repository_name)) } * ', '})" + end + + statement << ')' + statement + end + + # TODO: move to dm-more/dm-migrations + def drop_table_statement(repository, model) + "DROP TABLE IF EXISTS #{quote_table_name(model.storage_name(repository.name))}" + end + + # TODO: move to dm-more/dm-migrations + def create_index_statements(repository, model) + table_name = model.storage_name(repository.name) + model.properties(repository.name).indexes.map do |index_name, fields| + <<-EOS.compress_lines + CREATE INDEX #{quote_column_name("index_#{table_name}_#{index_name}")} ON + #{quote_table_name(table_name)} (#{fields.map { |f| quote_column_name(f) } * ', '}) + EOS + end + end + + # TODO: move to dm-more/dm-migrations + def create_unique_index_statements(repository, model) + table_name = model.storage_name(repository.name) + model.properties(repository.name).unique_indexes.map do |index_name, fields| + <<-EOS.compress_lines + CREATE UNIQUE INDEX #{quote_column_name("unique_index_#{table_name}_#{index_name}")} ON + #{quote_table_name(table_name)} (#{fields.map { |f| quote_column_name(f) } * ', '}) + EOS + end + end + + # TODO: move to dm-more/dm-migrations + def property_schema_hash(repository, property) + schema = self.class.type_map[property.type].merge(:name => property.field(repository.name)) + # TODO: figure out a way to specify the size not be included, even if + # a default is defined in the typemap + # - use this to make it so all TEXT primitive fields do not have size + if property.primitive == String && schema[:primitive] != 'TEXT' + schema[:size] = property.length + elsif property.primitive == BigDecimal || property.primitive == Float + schema[:precision] = property.precision + schema[:scale] = property.scale + end + + schema[:nullable?] = property.nullable? + schema[:serial?] = property.serial? + + if property.default.nil? 
|| property.default.respond_to?(:call) + # remove the default if the property is not nullable + schema.delete(:default) unless property.nullable? + else + if property.type.respond_to?(:dump) + schema[:default] = property.type.dump(property.default, property) + else + schema[:default] = property.default + end + end + + schema + end + + # TODO: move to dm-more/dm-migrations + def property_schema_statement(schema) + statement = quote_column_name(schema[:name]) + statement << " #{schema[:primitive]}" + + if schema[:precision] && schema[:scale] + statement << "(#{[ :precision, :scale ].map { |k| quote_column_value(schema[k]) } * ','})" + elsif schema[:size] + statement << "(#{quote_column_value(schema[:size])})" + end + + statement << ' NOT NULL' unless schema[:nullable?] + statement << " DEFAULT #{quote_column_value(schema[:default])}" if schema.has_key?(:default) + statement + end + + # TODO: move to dm-more/dm-migrations + def relationship_schema_hash(relationship) + identifier, relationship = relationship + + self.class.type_map[Integer].merge(:name => "#{identifier}_id") if identifier == relationship.name + end + + # TODO: move to dm-more/dm-migrations + def relationship_schema_statement(hash) + property_schema_statement(hash) unless hash.nil? + end + end # module SQL + + include SQL + + module ClassMethods + # Default TypeMap for all data object based adapters. + # + # @return default TypeMap for data objects adapters. 
+ # + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new(super) do |tm| + tm.map(Integer).to('INT') + tm.map(String).to('VARCHAR').with(:size => Property::DEFAULT_LENGTH) + tm.map(Class).to('VARCHAR').with(:size => Property::DEFAULT_LENGTH) + tm.map(DM::Discriminator).to('VARCHAR').with(:size => Property::DEFAULT_LENGTH) + tm.map(BigDecimal).to('DECIMAL').with(:precision => Property::DEFAULT_PRECISION, :scale => Property::DEFAULT_SCALE_BIGDECIMAL) + tm.map(Float).to('FLOAT').with(:precision => Property::DEFAULT_PRECISION) + tm.map(DateTime).to('DATETIME') + tm.map(Date).to('DATE') + tm.map(Time).to('TIMESTAMP') + tm.map(TrueClass).to('BOOLEAN') + tm.map(DM::Object).to('TEXT') + tm.map(DM::Text).to('TEXT') + end + end + end # module ClassMethods + end # module Migration + + include Migration + extend Migration::ClassMethods + end # class DataObjectsAdapter + end # module Adapters + + # TODO: move to dm-ar-finders + module Model + # + # Find instances by manually providing SQL + # + # @param sql an SQL query to execute + # @param an Array containing a String (being the SQL query to + # execute) and the parameters to the query. + # example: ["SELECT name FROM users WHERE id = ?", id] + # @param query a prepared Query to execute. + # @param opts an options hash. + # :repository the name of the repository to execute the query + # in. Defaults to self.default_repository_name. + # :reload whether to reload any instances found that already + # exist in the identity map. Defaults to false. + # :properties the Properties of the instance that the query + # loads. Must contain DataMapper::Properties. + # Defaults to self.properties. + # + # @note + # A String, Array or Query is required. + # @return the instance matched by the query. 
+ # + # @example + # MyClass.find_by_sql(["SELECT id FROM my_classes WHERE county = ?", + # selected_county], :properties => MyClass.property[:id], + # :repository => :county_repo) + # + # - + # @api public + def find_by_sql(*args) + sql = nil + query = nil + bind_values = [] + properties = nil + do_reload = false + repository_name = default_repository_name + args.each do |arg| + if arg.is_a?(String) + sql = arg + elsif arg.is_a?(Array) + sql = arg.first + bind_values = arg[1..-1] + elsif arg.is_a?(DataMapper::Query) + query = arg + elsif arg.is_a?(Hash) + repository_name = arg.delete(:repository) if arg.include?(:repository) + properties = Array(arg.delete(:properties)) if arg.include?(:properties) + do_reload = arg.delete(:reload) if arg.include?(:reload) + raise "unknown options to #find_by_sql: #{arg.inspect}" unless arg.empty? + end + end + + repository = repository(repository_name) + raise "#find_by_sql only available for Repositories served by a DataObjectsAdapter" unless repository.adapter.is_a?(DataMapper::Adapters::DataObjectsAdapter) + + if query + sql = repository.adapter.send(:read_statement, query) + bind_values = query.bind_values + end + + raise "#find_by_sql requires a query of some kind to work" unless sql + + properties ||= self.properties(repository.name) + + Collection.new(Query.new(repository, self)) do |collection| + repository.adapter.send(:with_connection) do |connection| + command = connection.create_command(sql) + + begin + reader = command.execute_reader(*bind_values) + + while(reader.next!) 
+ collection.load(reader.values) + end + ensure + reader.close if reader + end + end + end + end + end # module Model +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/in_memory_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/in_memory_adapter.rb new file mode 100644 index 0000000..b1f8248 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/in_memory_adapter.rb @@ -0,0 +1,87 @@ +module DataMapper + module Adapters + class InMemoryAdapter < AbstractAdapter + def initialize(name, uri_or_options) + @records = Hash.new { |hash,model| hash[model] = Array.new } + end + + def create(resources) + resources.each do |resource| + @records[resource.model] << resource + end.size # just return the number of records + end + + def update(attributes, query) + read_many(query).each do |resource| + attributes.each do |property,value| + property.set!(resource, value) + end + end.size + end + + def read_one(query) + read(query, query.model, false) + end + + def read_many(query) + Collection.new(query) do |set| + read(query, set, true) + end + end + + def delete(query) + records = @records[query.model] + + read_many(query).each do |resource| + records.delete(resource) + end.size + end + + private + + def read(query, set, many = true) + model = query.model + conditions = query.conditions + + match_with = many ? :select : :detect + + # Iterate over the records for this model, and return + # the ones that match the conditions + result = @records[model].send(match_with) do |resource| + conditions.all? do |tuple| + operator, property, bind_value = *tuple + + value = property.get!(resource) + + case operator + when :eql, :in then equality_comparison(bind_value, value) + when :not then !equality_comparison(bind_value, value) + when :like then Regexp.new(bind_value) =~ value + when :gt then !value.nil? && value > bind_value + when :gte then !value.nil? && value >= bind_value + when :lt then !value.nil? 
&& value < bind_value + when :lte then !value.nil? && value <= bind_value + else raise "Invalid query operator: #{operator.inspect}" + end + end + end + + return result unless many + + # TODO Sort + + # TODO Limit + + set.replace(result) + end + + def equality_comparison(bind_value, value) + case bind_value + when Array, Range then bind_value.include?(value) + when NilClass then value.nil? + else bind_value == value + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/mysql_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/mysql_adapter.rb new file mode 100644 index 0000000..9e1651e --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/mysql_adapter.rb @@ -0,0 +1,136 @@ +gem 'do_mysql', '>=0.9.5' +require 'do_mysql' + +module DataMapper + module Adapters + # Options: + # host, user, password, database (path), socket(uri query string), port + class MysqlAdapter < DataObjectsAdapter + module SQL + private + + def supports_default_values? + false + end + + def quote_table_name(table_name) + "`#{table_name.gsub('`', '``')}`" + end + + def quote_column_name(column_name) + "`#{column_name.gsub('`', '``')}`" + end + + def quote_column_value(column_value) + case column_value + when TrueClass then quote_column_value(1) + when FalseClass then quote_column_value(0) + else + super + end + end + end #module SQL + + include SQL + + # TODO: move to dm-more/dm-migrations + module Migration + # TODO: move to dm-more/dm-migrations (if possible) + def storage_exists?(storage_name) + statement = <<-EOS.compress_lines + SELECT COUNT(*) + FROM `information_schema`.`tables` + WHERE `table_type` = 'BASE TABLE' + AND `table_schema` = ? + AND `table_name` = ? + EOS + + query(statement, db_name, storage_name).first > 0 + end + + # TODO: move to dm-more/dm-migrations (if possible) + def field_exists?(storage_name, field_name) + statement = <<-EOS.compress_lines + SELECT COUNT(*) + FROM `information_schema`.`columns` + WHERE `table_schema` = ? 
+ AND `table_name` = ? + AND `column_name` = ? + EOS + + query(statement, db_name, storage_name, field_name).first > 0 + end + + private + + # TODO: move to dm-more/dm-migrations (if possible) + def db_name + @uri.path.split('/').last + end + + module SQL + private + + # TODO: move to dm-more/dm-migrations + def supports_serial? + true + end + + # TODO: move to dm-more/dm-migrations + def create_table_statement(repository, model) + "#{super} ENGINE = InnoDB CHARACTER SET #{character_set} COLLATE #{collation}" + end + + # TODO: move to dm-more/dm-migrations + def property_schema_hash(property, model) + schema = super + schema.delete(:default) if schema[:primitive] == 'TEXT' + schema + end + + # TODO: move to dm-more/dm-migrations + def property_schema_statement(schema) + statement = super + statement << ' AUTO_INCREMENT' if supports_serial? && schema[:serial?] + statement + end + + # TODO: move to dm-more/dm-migrations + def character_set + @character_set ||= show_variable('character_set_connection') || 'utf8' + end + + # TODO: move to dm-more/dm-migrations + def collation + @collation ||= show_variable('collation_connection') || 'utf8_general_ci' + end + + # TODO: move to dm-more/dm-migrations + def show_variable(name) + query('SHOW VARIABLES WHERE `variable_name` = ?', name).first.value rescue nil + end + end # module SQL + + include SQL + + module ClassMethods + # TypeMap for MySql databases. + # + # @return default TypeMap for MySql databases. + # + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new(super) do |tm| + tm.map(Integer).to('INT').with(:size => 11) + tm.map(TrueClass).to('TINYINT').with(:size => 1) # TODO: map this to a BIT or CHAR(0) field? 
+ tm.map(Object).to('TEXT') + end + end + end # module ClassMethods + end # module Migration + + include Migration + extend Migration::ClassMethods + end # class MysqlAdapter + end # module Adapters +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/postgres_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/postgres_adapter.rb new file mode 100644 index 0000000..1c8774e --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/postgres_adapter.rb @@ -0,0 +1,188 @@ +gem 'do_postgres', '>=0.9.5' +require 'do_postgres' + +module DataMapper + module Adapters + class PostgresAdapter < DataObjectsAdapter + module SQL + private + + def supports_returning? + true + end + end #module SQL + + include SQL + + # TODO: move to dm-more/dm-migrations (if possible) + module Migration + # TODO: move to dm-more/dm-migrations (if possible) + def storage_exists?(storage_name) + statement = <<-EOS.compress_lines + SELECT COUNT(*) + FROM "information_schema"."columns" + WHERE "table_name" = ? + AND "table_schema" = current_schema() + EOS + + query(statement, storage_name).first > 0 + end + + # TODO: move to dm-more/dm-migrations (if possible) + def field_exists?(storage_name, column_name) + statement = <<-EOS.compress_lines + SELECT COUNT(*) + FROM "pg_class" + JOIN "pg_attribute" ON "pg_class"."oid" = "pg_attribute"."attrelid" + WHERE "pg_attribute"."attname" = ? + AND "pg_class"."relname" = ? 
+ AND "pg_attribute"."attnum" >= 0 + EOS + + query(statement, column_name, storage_name).first > 0 + end + + # TODO: move to dm-more/dm-migrations + def upgrade_model_storage(repository, model) + add_sequences(repository, model) + super + end + + # TODO: move to dm-more/dm-migrations + def create_model_storage(repository, model) + add_sequences(repository, model) + without_notices { super } + end + + # TODO: move to dm-more/dm-migrations + def destroy_model_storage(repository, model) + return true unless storage_exists?(model.storage_name(repository.name)) + success = without_notices { super } + model.properties(repository.name).each do |property| + drop_sequence(repository, property) if property.serial? + end + success + end + + protected + + # TODO: move to dm-more/dm-migrations + def create_sequence(repository, property) + return if sequence_exists?(repository, property) + execute(create_sequence_statement(repository, property)) + end + + # TODO: move to dm-more/dm-migrations + def drop_sequence(repository, property) + without_notices { execute(drop_sequence_statement(repository, property)) } + end + + module SQL + private + + # TODO: move to dm-more/dm-migrations + def drop_table_statement(repository, model) + "DROP TABLE #{quote_table_name(model.storage_name(repository.name))}" + end + + # TODO: move to dm-more/dm-migrations + def without_notices + # execute the block with NOTICE messages disabled + begin + execute('SET client_min_messages = warning') + yield + ensure + execute('RESET client_min_messages') + end + end + + # TODO: move to dm-more/dm-migrations + def add_sequences(repository, model) + model.properties(repository.name).each do |property| + create_sequence(repository, property) if property.serial? 
+ end + end + + # TODO: move to dm-more/dm-migrations + def sequence_name(repository, property) + "#{property.model.storage_name(repository.name)}_#{property.field(repository.name)}_seq" + end + + # TODO: move to dm-more/dm-migrations + def sequence_exists?(repository, property) + statement = <<-EOS.compress_lines + SELECT COUNT(*) + FROM "pg_class" + WHERE "relkind" = 'S' AND "relname" = ? + EOS + + query(statement, sequence_name(repository, property)).first > 0 + end + + # TODO: move to dm-more/dm-migrations + def create_sequence_statement(repository, property) + "CREATE SEQUENCE #{quote_column_name(sequence_name(repository, property))}" + end + + # TODO: move to dm-more/dm-migrations + def drop_sequence_statement(repository, property) + "DROP SEQUENCE IF EXISTS #{quote_column_name(sequence_name(repository, property))}" + end + + # TODO: move to dm-more/dm-migrations + def property_schema_statement(schema) + statement = super + + if schema.has_key?(:sequence_name) + statement << " DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL" + end + + statement + end + + # TODO: move to dm-more/dm-migrations + def property_schema_hash(repository, property) + schema = super + + if property.serial? + schema.delete(:default) # the sequence will be the default + schema[:sequence_name] = sequence_name(repository, property) + end + + # TODO: see if TypeMap can be updated to set specific attributes to nil + # for different adapters. precision/scale are perfect examples for + # Postgres floats + + # Postgres does not support precision and scale for Float + if property.primitive == Float + schema.delete(:precision) + schema.delete(:scale) + end + + schema + end + end # module SQL + + include SQL + + module ClassMethods + # TypeMap for PostgreSQL databases. + # + # @return default TypeMap for PostgreSQL databases. 
+ # + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new(super) do |tm| + tm.map(DateTime).to('TIMESTAMP') + tm.map(Integer).to('INT4') + tm.map(Float).to('FLOAT8') + end + end + end # module ClassMethods + end # module Migration + + include Migration + extend Migration::ClassMethods + end # class PostgresAdapter + end # module Adapters +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/adapters/sqlite3_adapter.rb b/vendor/dm-core-0.9.6/lib/dm-core/adapters/sqlite3_adapter.rb new file mode 100644 index 0000000..bc1efbd --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/adapters/sqlite3_adapter.rb @@ -0,0 +1,105 @@ +gem 'do_sqlite3', '>=0.9.5' +require 'do_sqlite3' + +module DataMapper + module Adapters + class Sqlite3Adapter < DataObjectsAdapter + module SQL + private + + def quote_column_value(column_value) + case column_value + when TrueClass then quote_column_value('t') + when FalseClass then quote_column_value('f') + else + super + end + end + end # module SQL + + include SQL + + # TODO: move to dm-more/dm-migrations (if possible) + module Migration + # TODO: move to dm-more/dm-migrations (if possible) + def storage_exists?(storage_name) + query_table(storage_name).size > 0 + end + + # TODO: move to dm-more/dm-migrations (if possible) + def field_exists?(storage_name, column_name) + query_table(storage_name).any? do |row| + row.name == column_name + end + end + + private + + # TODO: move to dm-more/dm-migrations (if possible) + def query_table(table_name) + query('PRAGMA table_info(?)', table_name) + end + + module SQL +# private ## This cannot be private for current migrations + + # TODO: move to dm-more/dm-migrations + def supports_serial? 
+ sqlite_version >= '3.1.0' + end + + # TODO: move to dm-more/dm-migrations + def create_table_statement(repository, model) + statement = <<-EOS.compress_lines + CREATE TABLE #{quote_table_name(model.storage_name(repository.name))} + (#{model.properties_with_subclasses(repository.name).map { |p| property_schema_statement(property_schema_hash(repository, p)) } * ', '} + EOS + + # skip adding the primary key if one of the columns is serial. In + # SQLite the serial column must be the primary key, so it has already + # been defined + unless model.properties(repository.name).any? { |p| p.serial? } + if (key = model.properties(repository.name).key).any? + statement << ", PRIMARY KEY(#{key.map { |p| quote_column_name(p.field(repository.name)) } * ', '})" + end + end + + statement << ')' + statement + end + + # TODO: move to dm-more/dm-migrations + def property_schema_statement(schema) + statement = super + statement << ' PRIMARY KEY AUTOINCREMENT' if supports_serial? && schema[:serial?] + statement + end + + # TODO: move to dm-more/dm-migrations + def sqlite_version + @sqlite_version ||= query('SELECT sqlite_version(*)').first + end + end # module SQL + + include SQL + + module ClassMethods + # TypeMap for SQLite 3 databases. + # + # @return default TypeMap for SQLite 3 databases. 
+ # + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new(super) do |tm| + tm.map(Integer).to('INTEGER') + tm.map(Class).to('VARCHAR') + end + end + end # module ClassMethods + end # module Migration + + include Migration + extend Migration::ClassMethods + end # class Sqlite3Adapter + end # module Adapters +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations.rb new file mode 100644 index 0000000..2d83b13 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations.rb @@ -0,0 +1,200 @@ +dir = Pathname(__FILE__).dirname.expand_path / 'associations' + +require dir / 'relationship' +require dir / 'relationship_chain' +require dir / 'many_to_many' +require dir / 'many_to_one' +require dir / 'one_to_many' +require dir / 'one_to_one' + +module DataMapper + module Associations + include Assertions + + class ImmutableAssociationError < RuntimeError + end + + class UnsavedParentError < RuntimeError + end + + # Returns all relationships that are many-to-one for this model. + # + # Used to find the relationships that require properties in any Repository. + # + # Example: + # class Plur + # include DataMapper::Resource + # def self.default_repository_name + # :plur_db + # end + # repository(:plupp_db) do + # has 1, :plupp + # end + # end + # + # This resource has a many-to-one to the Plupp resource residing in the :plupp_db repository, + # but the Plur resource needs the plupp_id property no matter what repository itself lives in, + # ie we need to create that property when we migrate etc. + # + # Used in DataMapper::Model.properties_with_subclasses + # + # @api private + def many_to_one_relationships + relationships unless @relationships # needs to be initialized! 
+ @relationships.values.collect do |rels| rels.values end.flatten.select do |relationship| relationship.child_model == self end + end + + def relationships(repository_name = default_repository_name) + @relationships ||= {} + @relationships[repository_name] ||= repository_name == Repository.default_name ? {} : relationships(Repository.default_name).dup + end + + def n + 1.0/0 + end + + ## + # A shorthand, clear syntax for defining one-to-one, one-to-many and + # many-to-many resource relationships. + # + # @example [Usage] + # * has 1, :friend # one friend + # * has n, :friends # many friends + # * has 1..3, :friends + # # many friends (at least 1, at most 3) + # * has 3, :friends + # # many friends (exactly 3) + # * has 1, :friend, :class_name => 'User' + # # one friend with the class name User + # * has 3, :friends, :through => :friendships + # # many friends through the friendships relationship + # * has n, :friendships => :friends + # # identical to above example + # + # @param cardinality [Integer, Range, Infinity] + # cardinality that defines the association type and constraints + # @param name the name that the association will be referenced by + # @param opts an options hash + # + # @option :through[Symbol] A association that this join should go through to form + # a many-to-many association + # @option :class_name[String] The name of the class to associate with, if omitted + # then the association name is assumed to match the class name + # @option :remote_name[Symbol] In the case of a :through option being present, the + # name of the relationship on the other end of the :through-relationship + # to be linked to this relationship. + # + # @return [DataMapper::Association::Relationship] the relationship that was + # created to reflect either a one-to-one, one-to-many or many-to-many + # relationship + # @raise [ArgumentError] if the cardinality was not understood. 
Should be a + # Integer, Range or Infinity(n) + # + # @api public + def has(cardinality, name, options = {}) + + # NOTE: the reason for this fix is that with the ability to pass in two + # hashes into has() there might be instances where people attempt to + # pass in the options into the name part and not know why things aren't + # working for them. + if name.kind_of?(Hash) + name_through, through = name.keys.first, name.values.first + cardinality_string = cardinality.to_s == 'Infinity' ? 'n' : cardinality.inspect + warn("In #{self.name} 'has #{cardinality_string}, #{name_through.inspect} => #{through.inspect}' is deprecated. Use 'has #{cardinality_string}, #{name_through.inspect}, :through => #{through.inspect}' instead") + end + + options = options.merge(extract_min_max(cardinality)) + options = options.merge(extract_throughness(name)) + + # do not remove this. There is alot of confusion on people's + # part about what the first argument to has() is. For the record it + # is the min cardinality and max cardinality of the association. + # simply put, it constraints the number of resources that will be + # returned by the association. It is not, as has been assumed, + # the number of results on the left and right hand side of the + # reltionship. + if options[:min] == n && options[:max] == n + raise ArgumentError, 'Cardinality may not be n..n. The cardinality specifies the min/max number of results from the association', caller + end + + klass = options[:max] == 1 ? 
OneToOne : OneToMany + klass = ManyToMany if options[:through] == DataMapper::Resource + relationship = klass.setup(options.delete(:name), self, options) + + # Please leave this in - I will release contextual serialization soon + # which requires this -- guyvdb + # TODO convert this to a hook in the plugin once hooks work on class + # methods + self.init_has_relationship_for_serialization(relationship) if self.respond_to?(:init_has_relationship_for_serialization) + + relationship + end + + ## + # A shorthand, clear syntax for defining many-to-one resource relationships. + # + # @example [Usage] + # * belongs_to :user # many_to_one, :friend + # * belongs_to :friend, :class_name => 'User' # many_to_one :friends + # + # @param name [Symbol] The name that the association will be referenced by + # @see #has + # + # @return [DataMapper::Association::ManyToOne] The association created + # should not be accessed directly + # + # @api public + def belongs_to(name, options={}) + @_valid_relations = false + relationship = ManyToOne.setup(name, self, options) + # Please leave this in - I will release contextual serialization soon + # which requires this -- guyvdb + # TODO convert this to a hook in the plugin once hooks work on class + # methods + self.init_belongs_relationship_for_serialization(relationship) if self.respond_to?(:init_belongs_relationship_for_serialization) + + relationship + end + + private + + def extract_throughness(name) + assert_kind_of 'name', name, Hash, Symbol + + case name + when Hash + unless name.keys.size == 1 + raise ArgumentError, "name must have only one key, but had #{name.keys.size}", caller(2) + end + + { :name => name.keys.first, :through => name.values.first } + when Symbol + { :name => name } + end + end + + # A support method form converting Integer, Range or Infinity values into a + # { :min => x, :max => y } hash. 
+ # + # @api private + def extract_min_max(constraints) + assert_kind_of 'constraints', constraints, Integer, Range unless constraints == n + + case constraints + when Integer + { :min => constraints, :max => constraints } + when Range + if constraints.first > constraints.last + raise ArgumentError, "Constraint min (#{constraints.first}) cannot be larger than the max (#{constraints.last})" + end + + { :min => constraints.first, :max => constraints.last } + when n + { :min => 0, :max => n } + end + end + end # module Associations + + Model.append_extensions DataMapper::Associations + +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_many.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_many.rb new file mode 100644 index 0000000..a6a10b9 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_many.rb @@ -0,0 +1,147 @@ +require File.join(File.dirname(__FILE__), "one_to_many") +module DataMapper + module Associations + module ManyToMany + extend Assertions + + # Setup many to many relationship between two models + # - + # @api private + def self.setup(name, model, options = {}) + assert_kind_of 'name', name, Symbol + assert_kind_of 'model', model, Model + assert_kind_of 'options', options, Hash + + repository_name = model.repository.name + + model.class_eval <<-EOS, __FILE__, __LINE__ + def #{name}(query = {}) + #{name}_association.all(query) + end + + def #{name}=(children) + #{name}_association.replace(children) + end + + private + + def #{name}_association + @#{name}_association ||= begin + unless relationship = model.relationships(#{repository_name.inspect})[#{name.inspect}] + raise ArgumentError, "Relationship #{name.inspect} does not exist in \#{model}" + end + association = Proxy.new(relationship, self) + parent_associations << association + association + end + end + EOS + + opts = options.dup + opts.delete(:through) + opts[:child_model] ||= opts.delete(:class_name) || 
Extlib::Inflection.classify(name) + opts[:parent_model] = model + opts[:repository_name] = repository_name + opts[:remote_relationship_name] ||= opts.delete(:remote_name) || Extlib::Inflection.tableize(opts[:child_model]) + opts[:parent_key] = opts[:parent_key] + opts[:child_key] = opts[:child_key] + opts[:mutable] = true + + names = [ opts[:child_model], opts[:parent_model].name ].sort + model_name = names.join.gsub("::", "") + storage_name = Extlib::Inflection.tableize(Extlib::Inflection.pluralize(names[0]) + names[1]) + + opts[:near_relationship_name] = Extlib::Inflection.tableize(model_name).to_sym + + model.has(model.n, opts[:near_relationship_name]) + + relationship = model.relationships(repository_name)[name] = RelationshipChain.new(opts) + + unless Object.const_defined?(model_name) + model = DataMapper::Model.new(storage_name) + + model.class_eval <<-EOS, __FILE__, __LINE__ + def self.name; #{model_name.inspect} end + def self.default_repository_name; #{repository_name.inspect} end + def self.many_to_many; true end + EOS + + names.each do |n| + model.belongs_to(Extlib::Inflection.underscore(n).gsub("/", "_").to_sym, :class_name => n) + end + + Object.const_set(model_name, model) + end + + relationship + end + + class Proxy < DataMapper::Associations::OneToMany::Proxy + def delete(resource) + through = near_association.get(*(@parent.key + resource.key)) + near_association.delete(through) + orphan_resource(super) + end + + def clear + near_association.clear + super + end + + def destroy + near_association.destroy + super + end + + def save + end + + private + + def new_child(attributes) + remote_relationship.parent_model.new(attributes) + end + + def relate_resource(resource) + assert_mutable + add_default_association_values(resource) + @orphans.delete(resource) + + # TODO: fix this so it does not automatically save on append, if possible + resource.save if resource.new_record? 
+ through_resource = @relationship.child_model.new + @relationship.child_key.zip(@relationship.parent_key) do |child_key,parent_key| + through_resource.send("#{child_key.name}=", parent_key.get(@parent)) + end + remote_relationship.child_key.zip(remote_relationship.parent_key) do |child_key,parent_key| + through_resource.send("#{child_key.name}=", parent_key.get(resource)) + end + near_association << through_resource + + resource + end + + def orphan_resource(resource) + assert_mutable + @orphans << resource + resource + end + + def assert_mutable + end + + def remote_relationship + @remote_relationship ||= @relationship.send(:remote_relationship) + end + + def near_association + @near_association ||= @parent.send(near_relationship_name) + end + + def near_relationship_name + @near_relationship_name ||= @relationship.send(:instance_variable_get, :@near_relationship_name) + end + end # class Proxy + end # module ManyToMany + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_one.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_one.rb new file mode 100644 index 0000000..7c22c46 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/many_to_one.rb @@ -0,0 +1,107 @@ +module DataMapper + module Associations + module ManyToOne + extend Assertions + + # Setup many to one relationship between two models + # - + # @api private + def self.setup(name, model, options = {}) + assert_kind_of 'name', name, Symbol + assert_kind_of 'model', model, Model + assert_kind_of 'options', options, Hash + + repository_name = model.repository.name + + model.class_eval <<-EOS, __FILE__, __LINE__ + def #{name} + #{name}_association.nil? ? 
nil : #{name}_association + end + + def #{name}=(parent) + #{name}_association.replace(parent) + end + + private + + def #{name}_association + @#{name}_association ||= begin + unless relationship = model.relationships(#{repository_name.inspect})[:#{name}] + raise ArgumentError, "Relationship #{name.inspect} does not exist in \#{model}" + end + association = Proxy.new(relationship, self) + child_associations << association + association + end + end + EOS + + model.relationships(repository_name)[name] = Relationship.new( + name, + repository_name, + model, + options.fetch(:class_name, Extlib::Inflection.classify(name)), + options + ) + end + + class Proxy + include Assertions + + instance_methods.each { |m| undef_method m unless %w[ __id__ __send__ class kind_of? respond_to? assert_kind_of should should_not instance_variable_set instance_variable_get ].include?(m) } + + def replace(parent) + @parent = parent + @relationship.attach_parent(@child, @parent) + self + end + + def save + return false if @parent.nil? + return true unless parent.new_record? 
+ + @relationship.with_repository(parent) do + result = parent.save + @relationship.child_key.set(@child, @relationship.parent_key.get(parent)) if result + result + end + end + + def reload + @parent = nil + self + end + + def kind_of?(klass) + super || parent.kind_of?(klass) + end + + def respond_to?(method, include_private = false) + super || parent.respond_to?(method, include_private) + end + + def instance_variable_get(variable) + super || parent.instance_variable_get(variable) + end + + private + + def initialize(relationship, child) + assert_kind_of 'relationship', relationship, Relationship + assert_kind_of 'child', child, Resource + + @relationship = relationship + @child = child + end + + def parent + @parent ||= @relationship.get_parent(@child) + end + + def method_missing(method, *args, &block) + parent.__send__(method, *args, &block) + end + end # class Proxy + end # module ManyToOne + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_many.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_many.rb new file mode 100644 index 0000000..dec6816 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_many.rb @@ -0,0 +1,318 @@ +module DataMapper + module Associations + module OneToMany + extend Assertions + + # Setup one to many relationship between two models + # - + # @api private + def self.setup(name, model, options = {}) + assert_kind_of 'name', name, Symbol + assert_kind_of 'model', model, Model + assert_kind_of 'options', options, Hash + + repository_name = model.repository.name + + model.class_eval <<-EOS, __FILE__, __LINE__ + def #{name}(query = {}) + #{name}_association.all(query) + end + + def #{name}=(children) + #{name}_association.replace(children) + end + + private + + def #{name}_association + @#{name}_association ||= begin + unless relationship = model.relationships(#{repository_name.inspect})[#{name.inspect}] + raise ArgumentError, "Relationship 
#{name.inspect} does not exist in \#{model}" + end + association = Proxy.new(relationship, self) + parent_associations << association + association + end + end + EOS + + model.relationships(repository_name)[name] = if options.has_key?(:through) + opts = options.dup + + if opts.key?(:class_name) && !opts.key?(:child_key) + warn(<<-EOS.margin) + You have specified #{model.base_model.name}.has(#{name.inspect}) with :class_name => #{opts[:class_name].inspect}. You probably also want to specify the :child_key option. + EOS + end + + opts[:child_model] ||= opts.delete(:class_name) || Extlib::Inflection.classify(name) + opts[:parent_model] = model + opts[:repository_name] = repository_name + opts[:near_relationship_name] = opts.delete(:through) + opts[:remote_relationship_name] ||= opts.delete(:remote_name) || name + opts[:parent_key] = opts[:parent_key] + opts[:child_key] = opts[:child_key] + + RelationshipChain.new( opts ) + else + Relationship.new( + name, + repository_name, + options.fetch(:class_name, Extlib::Inflection.classify(name)), + model, + options + ) + end + end + + # TODO: look at making this inherit from Collection. The API is + # almost identical, and it would make more sense for the + # relationship.get_children method to return a Proxy than a + # Collection that is wrapped in a Proxy. + class Proxy + include Assertions + + instance_methods.each { |m| undef_method m unless %w[ __id__ __send__ class kind_of? respond_to? assert_kind_of should should_not instance_variable_set instance_variable_get ].include?(m) } + + # FIXME: remove when RelationshipChain#get_children can return a Collection + def all(query = {}) + query.empty? ? 
self : @relationship.get_children(@parent, query) + end + + # FIXME: remove when RelationshipChain#get_children can return a Collection + def first(*args) + if args.last.respond_to?(:merge) + query = args.pop + @relationship.get_children(@parent, query, :first, *args) + else + super + end + end + + def <<(resource) + assert_mutable + return self if !resource.new_record? && self.include?(resource) + super + relate_resource(resource) + self + end + + def push(*resources) + assert_mutable + resources.reject { |resource| !resource.new_record? && self.include?(resource) } + super + resources.each { |resource| relate_resource(resource) } + self + end + + def unshift(*resources) + assert_mutable + resources.reject { |resource| !resource.new_record? && self.include?(resource) } + super + resources.each { |resource| relate_resource(resource) } + self + end + + def replace(other) + assert_mutable + each { |resource| orphan_resource(resource) } + other = other.map { |resource| resource.kind_of?(Hash) ? new_child(resource) : resource } + super + other.each { |resource| relate_resource(resource) } + self + end + + def pop + assert_mutable + orphan_resource(super) + end + + def shift + assert_mutable + orphan_resource(super) + end + + def delete(resource) + assert_mutable + orphan_resource(super) + end + + def delete_at(index) + assert_mutable + orphan_resource(super) + end + + def clear + assert_mutable + each { |resource| orphan_resource(resource) } + super + self + end + + def build(attributes = {}) + assert_mutable + attributes = default_attributes.merge(attributes) + resource = children.respond_to?(:build) ? super(attributes) : new_child(attributes) + resource + end + + def new(attributes = {}) + assert_mutable + raise UnsavedParentError, 'You cannot intialize until the parent is saved' if @parent.new_record? + attributes = default_attributes.merge(attributes) + resource = children.respond_to?(:new) ? 
super(attributes) : @relationship.child_model.new(attributes) + self << resource + resource + end + + def create(attributes = {}) + assert_mutable + raise UnsavedParentError, 'You cannot create until the parent is saved' if @parent.new_record? + attributes = default_attributes.merge(attributes) + resource = children.respond_to?(:create) ? super(attributes) : @relationship.child_model.create(attributes) + self << resource + resource + end + + def update(attributes = {}) + assert_mutable + raise UnsavedParentError, 'You cannot mass-update until the parent is saved' if @parent.new_record? + super + end + + def update!(attributes = {}) + assert_mutable + raise UnsavedParentError, 'You cannot mass-update without validations until the parent is saved' if @parent.new_record? + super + end + + def destroy + assert_mutable + raise UnsavedParentError, 'You cannot mass-delete until the parent is saved' if @parent.new_record? + super + end + + def destroy! + assert_mutable + raise UnsavedParentError, 'You cannot mass-delete without validations until the parent is saved' if @parent.new_record? + super + end + + def reload + @children = nil + self + end + + def save + return true if children.frozen? + + # save every resource in the collection + each { |resource| save_resource(resource) } + + # save orphan resources + @orphans.each do |resource| + begin + save_resource(resource, nil) + rescue + children << resource unless children.frozen? || children.include?(resource) + raise + end + end + + # FIXME: remove when RelationshipChain#get_children can return a Collection + # place the children into a Collection if not already + if children.kind_of?(Array) && !children.frozen? 
+ @children = @relationship.get_children(@parent).replace(children) + end + + true + end + + def kind_of?(klass) + super || children.kind_of?(klass) + end + + def respond_to?(method, include_private = false) + super || children.respond_to?(method, include_private) + end + + private + + def initialize(relationship, parent) + assert_kind_of 'relationship', relationship, Relationship + assert_kind_of 'parent', parent, Resource + + @relationship = relationship + @parent = parent + @orphans = [] + end + + def children + @children ||= @relationship.get_children(@parent) + end + + def assert_mutable + raise ImmutableAssociationError, 'You can not modify this association' if children.frozen? + end + + def default_attributes + default_attributes = {} + + @relationship.query.each do |attribute, value| + next if Query::OPTIONS.include?(attribute) || attribute.kind_of?(Query::Operator) + default_attributes[attribute] = value + end + + @relationship.child_key.zip(@relationship.parent_key.get(@parent)) do |property,value| + default_attributes[property.name] = value + end + + default_attributes + end + + def add_default_association_values(resource) + default_attributes.each do |attribute, value| + next if !resource.respond_to?("#{attribute}=") || resource.attribute_loaded?(attribute) + resource.send("#{attribute}=", value) + end + end + + def new_child(attributes) + @relationship.child_model.new(default_attributes.merge(attributes)) + end + + def relate_resource(resource) + assert_mutable + add_default_association_values(resource) + @orphans.delete(resource) + resource + end + + def orphan_resource(resource) + assert_mutable + @orphans << resource + resource + end + + def save_resource(resource, parent = @parent) + @relationship.with_repository(resource) do |r| + if parent.nil? 
&& resource.model.respond_to?(:many_to_many) + resource.destroy + else + @relationship.attach_parent(resource, parent) + resource.save + end + end + end + + def method_missing(method, *args, &block) + results = children.__send__(method, *args, &block) if children.respond_to?(method) + + return self if LazyArray::RETURN_SELF.include?(method) && results.kind_of?(Array) + + results + end + end # class Proxy + end # module OneToMany + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_one.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_one.rb new file mode 100644 index 0000000..56a0432 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/one_to_one.rb @@ -0,0 +1,61 @@ +module DataMapper + module Associations + module OneToOne + extend Assertions + + # Setup one to one relationship between two models + # - + # @api private + def self.setup(name, model, options = {}) + assert_kind_of 'name', name, Symbol + assert_kind_of 'model', model, Model + assert_kind_of 'options', options, Hash + + repository_name = model.repository.name + + model.class_eval <<-EOS, __FILE__, __LINE__ + def #{name} + #{name}_association.first + end + + def #{name}=(child_resource) + #{name}_association.replace(child_resource.nil? ? 
[] : [ child_resource ]) + end + + private + + def #{name}_association + @#{name}_association ||= begin + unless relationship = model.relationships(#{repository_name.inspect})[:#{name}] + raise ArgumentError, "Relationship #{name.inspect} does not exist in \#{model}" + end + association = Associations::OneToMany::Proxy.new(relationship, self) + parent_associations << association + association + end + end + EOS + + model.relationships(repository_name)[name] = if options.has_key?(:through) + RelationshipChain.new( + :child_model => options.fetch(:class_name, Extlib::Inflection.classify(name)), + :parent_model => model, + :repository_name => repository_name, + :near_relationship_name => options[:through], + :remote_relationship_name => options.fetch(:remote_name, name), + :parent_key => options[:parent_key], + :child_key => options[:child_key] + ) + else + Relationship.new( + name, + repository_name, + options.fetch(:class_name, Extlib::Inflection.classify(name)), + model, + options + ) + end + end + end # module HasOne + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship.rb new file mode 100644 index 0000000..d5f35b0 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship.rb @@ -0,0 +1,223 @@ +module DataMapper + module Associations + class Relationship + include Assertions + + OPTIONS = [ :class_name, :child_key, :parent_key, :min, :max, :through ] + + # @api private + attr_reader :name, :options, :query + + # @api private + def child_key + @child_key ||= begin + child_key = nil + child_model.repository.scope do |r| + model_properties = child_model.properties(r.name) + + child_key = parent_key.zip(@child_properties || []).map do |parent_property,property_name| + # TODO: use something similar to DM::NamingConventions to determine the property name + parent_name = 
Extlib::Inflection.underscore(Extlib::Inflection.demodulize(parent_model.base_model.name)) + property_name ||= "#{parent_name}_#{parent_property.name}".to_sym + + if model_properties.has_property?(property_name) + model_properties[property_name] + else + options = {} + + [ :length, :precision, :scale ].each do |option| + options[option] = parent_property.send(option) + end + + # NOTE: hack to make each many to many child_key a true key, + # until I can figure out a better place for this check + if child_model.respond_to?(:many_to_many) + options[:key] = true + end + + child_model.property(property_name, parent_property.primitive, options) + end + end + end + PropertySet.new(child_key) + end + end + + # @api private + def parent_key + @parent_key ||= begin + parent_key = nil + parent_model.repository.scope do |r| + parent_key = if @parent_properties + parent_model.properties(r.name).slice(*@parent_properties) + else + parent_model.key + end + end + PropertySet.new(parent_key) + end + end + + # @api private + def parent_model + Class === @parent_model ? @parent_model : (Class === @child_model ? @child_model.find_const(@parent_model) : Object.find_const(@parent_model)) + rescue NameError + raise NameError, "Cannot find the parent_model #{@parent_model} for #{@child_model}" + end + + # @api private + def child_model + Class === @child_model ? @child_model : (Class === @parent_model ? @parent_model.find_const(@child_model) : Object.find_const(@child_model)) + rescue NameError + raise NameError, "Cannot find the child_model #{@child_model} for #{@parent_model}" + end + + # @api private + def get_children(parent, options = {}, finder = :all, *args) + parent_value = parent_key.get(parent) + bind_values = [ parent_value ] + + with_repository(child_model) do |r| + parent_identity_map = parent.repository.identity_map(parent_model) + child_identity_map = r.identity_map(child_model) + + query_values = parent_identity_map.keys + query_values.reject! 
{ |k| child_identity_map[k] } + + bind_values = query_values unless query_values.empty? + query = child_key.zip(bind_values.transpose).to_hash + + collection = child_model.send(finder, *(args.dup << @query.merge(options).merge(query))) + + return collection unless collection.kind_of?(Collection) && collection.any? + + grouped_collection = {} + collection.each do |resource| + child_value = child_key.get(resource) + parent_obj = parent_identity_map[child_value] + grouped_collection[parent_obj] ||= [] + grouped_collection[parent_obj] << resource + end + + association_accessor = "#{self.name}_association" + + ret = nil + grouped_collection.each do |parent, children| + association = parent.send(association_accessor) + + query = collection.query.dup + query.conditions.map! do |operator, property, bind_value| + if operator != :raw && child_key.has_property?(property.name) + bind_value = *children.map { |child| property.get(child) }.uniq + end + [ operator, property, bind_value ] + end + + parents_children = Collection.new(query) + children.each { |child| parents_children.send(:add, child) } + + if parent_key.get(parent) == parent_value + ret = parents_children + else + association.instance_variable_set(:@children, parents_children) + end + end + + ret || child_model.send(finder, *(args.dup << @query.merge(options).merge(child_key.zip([ parent_value ]).to_hash))) + end + end + + # @api private + def get_parent(child, parent = nil) + child_value = child_key.get(child) + return nil unless child_value.nitems == child_value.size + + with_repository(parent || parent_model) do + parent_identity_map = (parent || parent_model).repository.identity_map(parent_model.base_model) + child_identity_map = child.repository.identity_map(child_model.base_model) + + if parent = parent_identity_map[child_value] + return parent + end + + children = child_identity_map.values + children << child unless child_identity_map[child.key] + + bind_values = children.map { |c| child_key.get(c) }.uniq + 
query_values = bind_values.reject { |k| parent_identity_map[k] } + + bind_values = query_values unless query_values.empty? + query = parent_key.zip(bind_values.transpose).to_hash + association_accessor = "#{self.name}_association" + + collection = parent_model.send(:all, query) + unless collection.empty? + collection.send(:lazy_load) + children.each do |c| + c.send(association_accessor).instance_variable_set(:@parent, collection.get(*child_key.get(c))) + end + child.send(association_accessor).instance_variable_get(:@parent) + end + end + end + + # @api private + def with_repository(object = nil) + other_model = object.model == child_model ? parent_model : child_model if object.respond_to?(:model) + other_model = object == child_model ? parent_model : child_model if object.kind_of?(DataMapper::Resource) + + if other_model && other_model.repository == object.repository && object.repository.name != @repository_name + object.repository.scope { |block_args| yield(*block_args) } + else + repository(@repository_name) { |block_args| yield(*block_args) } + end + end + + # @api private + def attach_parent(child, parent) + child_key.set(child, parent && parent_key.get(parent)) + end + + private + + # +child_model_name and child_properties refers to the FK, parent_model_name + # and parent_properties refer to the PK. For more information: + # http://edocs.bea.com/kodo/docs41/full/html/jdo_overview_mapping_join.html + # I wash my hands of it! 
+ def initialize(name, repository_name, child_model, parent_model, options = {}) + assert_kind_of 'name', name, Symbol + assert_kind_of 'repository_name', repository_name, Symbol + assert_kind_of 'child_model', child_model, String, Class + assert_kind_of 'parent_model', parent_model, String, Class + + if child_properties = options[:child_key] + assert_kind_of 'options[:child_key]', child_properties, Array + end + + if parent_properties = options[:parent_key] + assert_kind_of 'options[:parent_key]', parent_properties, Array + end + + @name = name + @repository_name = repository_name + @child_model = child_model + @child_properties = child_properties # may be nil + @query = options.reject { |k,v| OPTIONS.include?(k) } + @parent_model = parent_model + @parent_properties = parent_properties # may be nil + @options = options + + # attempt to load the child_key if the parent and child model constants are defined + if model_defined?(@child_model) && model_defined?(@parent_model) + child_key + end + end + + # @api private + def model_defined?(model) + # TODO: figure out other ways to see if the model is loaded + model.kind_of?(Class) + end + end # class Relationship + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship_chain.rb b/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship_chain.rb new file mode 100644 index 0000000..2ea2bba --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/associations/relationship_chain.rb @@ -0,0 +1,81 @@ +module DataMapper + module Associations + class RelationshipChain < Relationship + OPTIONS = [ + :repository_name, :near_relationship_name, :remote_relationship_name, + :child_model, :parent_model, :parent_key, :child_key, + :min, :max + ] + + undef_method :get_parent + undef_method :attach_parent + + # @api private + def child_model + near_relationship.child_model + end + + # @api private + def get_children(parent, options = {}, finder = :all, *args) + query = 
@query.merge(options).merge(child_key.to_query(parent_key.get(parent))) + + query[:links] = links + query[:unique] = true + + with_repository(parent) do + results = grandchild_model.send(finder, *(args << query)) + # FIXME: remove the need for the uniq.freeze + finder == :all ? (@mutable ? results : results.freeze) : results + end + end + + private + + # @api private + def initialize(options) + if (missing_options = OPTIONS - [ :min, :max ] - options.keys ).any? + raise ArgumentError, "The options #{missing_options * ', '} are required", caller + end + + @repository_name = options.fetch(:repository_name) + @near_relationship_name = options.fetch(:near_relationship_name) + @remote_relationship_name = options.fetch(:remote_relationship_name) + @child_model = options.fetch(:child_model) + @parent_model = options.fetch(:parent_model) + @parent_properties = options.fetch(:parent_key) + @child_properties = options.fetch(:child_key) + @mutable = options.delete(:mutable) || false + + @name = near_relationship.name + @query = options.reject{ |key,val| OPTIONS.include?(key) } + @extra_links = [] + @options = options + end + + # @api private + def near_relationship + parent_model.relationships[@near_relationship_name] + end + + # @api private + def links + if remote_relationship.kind_of?(RelationshipChain) + remote_relationship.instance_eval { links } + [remote_relationship.instance_eval { near_relationship } ] + else + [ remote_relationship ] + end + end + + # @api private + def remote_relationship + near_relationship.child_model.relationships[@remote_relationship_name] || + near_relationship.child_model.relationships[@remote_relationship_name.to_s.singularize.to_sym] + end + + # @api private + def grandchild_model + Class === @child_model ? @child_model : (Class === @parent_model ? 
@parent_model.find_const(@child_model) : Object.find_const(@child_model)) + end + end # class Relationship + end # module Associations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/auto_migrations.rb b/vendor/dm-core-0.9.6/lib/dm-core/auto_migrations.rb new file mode 100644 index 0000000..1c8065d --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/auto_migrations.rb @@ -0,0 +1,113 @@ +# TODO: move to dm-more/dm-migrations + +module DataMapper + class AutoMigrator + ## + # Destructively automigrates the data-store to match the model. + # First migrates all models down and then up. + # REPEAT: THIS IS DESTRUCTIVE + # + # @param Symbol repository_name the repository to be migrated + def self.auto_migrate(repository_name = nil, *descendants) + auto_migrate_down(repository_name, *descendants) + auto_migrate_up(repository_name, *descendants) + end + + ## + # Destructively automigrates the data-store down + # REPEAT: THIS IS DESTRUCTIVE + # + # @param Symbol repository_name the repository to be migrated + # @calls DataMapper::Resource#auto_migrate_down! + # @api private + def self.auto_migrate_down(repository_name = nil, *descendants) + descendants = DataMapper::Resource.descendants.to_a if descendants.empty? + descendants.reverse.each do |model| + model.auto_migrate_down!(repository_name) + end + end + + ## + # Automigrates the data-store up + # + # @param Symbol repository_name the repository to be migrated + # @calls DataMapper::Resource#auto_migrate_up! + # @api private + def self.auto_migrate_up(repository_name = nil, *descendants) + descendants = DataMapper::Resource.descendants.to_a if descendants.empty? + descendants.each do |model| + model.auto_migrate_up!(repository_name) + end + end + + ## + # Safely migrates the data-store to match the model + # preserving data already in the data-store + # + # @param Symbol repository_name the repository to be migrated + # @calls DataMapper::Resource#auto_upgrade! 
+ def self.auto_upgrade(repository_name = nil) + DataMapper::Resource.descendants.each do |model| + model.auto_upgrade!(repository_name) + end + end + end # class AutoMigrator + + module AutoMigrations + ## + # Destructively automigrates the data-store to match the model + # REPEAT: THIS IS DESTRUCTIVE + # + # @param Symbol repository_name the repository to be migrated + def auto_migrate!(repository_name = self.repository_name) + auto_migrate_down!(repository_name) + auto_migrate_up!(repository_name) + end + + ## + # Destructively migrates the data-store down, which basically + # deletes all the models. + # REPEAT: THIS IS DESTRUCTIVE + # + # @param Symbol repository_name the repository to be migrated + # @api private + def auto_migrate_down!(repository_name = self.repository_name) + # repository_name ||= default_repository_name + if self.superclass != Object + self.superclass.auto_migrate!(repository_name) + else + repository(repository_name) do |r| + r.adapter.destroy_model_storage(r, self) + end + end + end + + ## + # Auto migrates the data-store to match the model + # + # @param Symbol repository_name the repository to be migrated + # @api private + def auto_migrate_up!(repository_name = self.repository_name) + if self.superclass != Object + self.superclass.auto_migrate!(repository_name) + else + repository(repository_name) do |r| + r.adapter.create_model_storage(r, self) + end + end + end + + ## + # Safely migrates the data-store to match the model + # preserving data already in the data-store + # + # @param Symbol repository_name the repository to be migrated + def auto_upgrade!(repository_name = self.repository_name) + repository(repository_name) do |r| + r.adapter.upgrade_model_storage(r, self) + end + end + + Model.send(:include, self) + end # module AutoMigrations +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/collection.rb b/vendor/dm-core-0.9.6/lib/dm-core/collection.rb new file mode 100644 index 0000000..c19dfa0 --- /dev/null 
module DataMapper
  # An ordered set of Resources sharing one Query. Entries load lazily
  # (via LazyArray); loaded resources are additionally indexed by key.
  class Collection < LazyArray
    include Assertions

    # @return [DataMapper::Query] the query scoping this collection
    attr_reader :query

    ##
    # @return [Repository] the repository the collection is associated with
    # @api public
    def repository
      query.repository
    end

    ##
    # Loads one record (raw field values) into the collection.
    # Used by the adapters.
    # @api private
    def load(values)
      add(model.load(values, query))
    end

    ##
    # Reloads the entries associated with this collection, optionally
    # further scoped by +query+.
    #
    # @param [DataMapper::Query, Hash] query (optional) additional scope
    # @see DataMapper::Collection#all
    # @api public
    def reload(query = {})
      @query = scoped_query(query)
      @query.update(:fields => @query.fields | @key_properties)
      replace(all(:reload => true))
    end

    ##
    # Retrieves a resource out of the collection by its key.
    #
    # @param [DataMapper::Types::*, ...] key values uniquely identifying a resource
    # @return [DataMapper::Resource, NilClass] the matching resource, if any
    # @api public
    def get(*key)
      key = model.typecast_key(key)
      if loaded?
        # find indexed resource (create the key index on first use)
        each { |resource| @cache[resource.key] = resource } if @cache.empty?
        @cache[key]
      elsif query.limit || query.offset > 0
        # current query is exclusive (limit/offset), so the resource must be
        # found within this window.
        # TODO: use a subquery to retrieve the collection and match it against
        # the key; requires Query condition keys to support a Property or an
        # Array of Property objects (DO adapters: "(a, b) IN (SELECT a, b ...)").
        # Use the brute force approach until subquery lookups work.
        lazy_load
        get(*key)
      else
        # current query is all inclusive, lookup using normal approach
        first(model.to_query(repository, key))
      end
    end

    ##
    # Like #get, but raises when the resource cannot be found.
    #
    # @raise [ObjectNotFoundError] when no resource matches +key+
    # @api public
    def get!(*key)
      get(*key) || raise(ObjectNotFoundError, "Could not find #{model.name} with key #{key.inspect} in collection")
    end

    ##
    # Further refines the collection's conditions; behaves like a database view.
    #
    # @param [Hash, DataMapper::Query] query scope to merge into this query
    # @return [DataMapper::Collection] collection for the merged query
    # @api public
    def all(query = {})
      # TODO: this shouldn't be a kicker if scoped_query() is called
      return self if query.kind_of?(Hash) ? query.empty? : query == self.query
      query = scoped_query(query)
      query.repository.read_many(query)
    end

    ##
    # Simulates Array#first: with no args returns the first entry; with an
    # Integer applies :limit => n; with a Hash/Query scopes and returns a
    # new collection.
    #
    # @param [Integer, Hash, Query] args
    # @return [DataMapper::Resource, DataMapper::Collection]
    # @api public
    def first(*args)
      # TODO: this shouldn't be a kicker if scoped_query() is called
      if loaded?
        if args.empty?
          return super
        elsif args.size == 1 && args.first.kind_of?(Integer)
          limit = args.shift
          return self.class.new(scoped_query(:limit => limit)) { |c| c.replace(super(limit)) }
        end
      end

      query = args.last.respond_to?(:merge) ? args.pop : {}
      query = scoped_query(query.merge(:limit => args.first || 1))

      if args.any?
        query.repository.read_many(query)
      else
        query.repository.read_one(query)
      end
    end

    ##
    # Simulates Array#last by reversing the declared order and delegating
    # to #first.
    #
    # @calls Collection#first
    # @api public
    def last(*args)
      return super if loaded? && args.empty?

      reversed = reverse

      # tell the collection to reverse the order of the
      # results coming out of the adapter
      reversed.query.add_reversed = !query.add_reversed?

      reversed.first(*args)
    end

    ##
    # Simulates Array#at; negative offsets read from the reversed query.
    #
    # @calls Collection#first
    # @calls Collection#last
    # @api public
    def at(offset)
      return super if loaded?
      offset >= 0 ? first(:offset => offset) : last(:offset => offset.abs - 1)
    end

    ##
    # Simulates Array#slice; accepts (offset), (offset, limit) or a Range.
    # For a Range, the min is the offset and (max - offset) the limit.
    #
    # @raise [ArgumentError] for any other argument shape
    # @alias []
    # @api public
    def slice(*args)
      return at(args.first) if args.size == 1 && args.first.kind_of?(Integer)

      if args.size == 2 && args.first.kind_of?(Integer) && args.last.kind_of?(Integer)
        offset, limit = args
      elsif args.size == 1 && args.first.kind_of?(Range)
        range  = args.first
        offset = range.first
        limit  = range.last - offset
        limit += 1 unless range.exclude_end?
      else
        raise ArgumentError, "arguments may be 1 or 2 Integers, or 1 Range object, was: #{args.inspect}", caller
      end

      all(:offset => offset, :limit => limit)
    end

    alias [] slice

    ##
    # @return [DataMapper::Collection] a new collection whose query order
    #   is reversed
    # @see DataMapper::Query#reverse
    # @api public
    def reverse
      all(self.query.reverse)
    end

    ##
    # @see Array#<<
    # @api public
    def <<(resource)
      super
      relate_resource(resource)
      self
    end

    ##
    # @see Array#push
    # @api public
    def push(*resources)
      super
      resources.each { |resource| relate_resource(resource) }
      self
    end

    ##
    # @see Array#unshift
    # @api public
    def unshift(*resources)
      super
      resources.each { |resource| relate_resource(resource) }
      self
    end

    ##
    # @see Array#replace
    # @api public
    def replace(other)
      if loaded?
        each { |resource| orphan_resource(resource) }
      end
      super
      other.each { |resource| relate_resource(resource) }
      self
    end

    ##
    # @see Array#pop
    # @api public
    def pop
      orphan_resource(super)
    end

    ##
    # @see Array#shift
    # @api public
    def shift
      orphan_resource(super)
    end

    ##
    # @see Array#delete
    # @api public
    def delete(resource)
      orphan_resource(super)
    end

    ##
    # @see Array#delete_at
    # @api public
    def delete_at(index)
      orphan_resource(super)
    end

    ##
    # @see Array#clear
    # @api public
    def clear
      if loaded?
        each { |resource| orphan_resource(resource) }
      end
      super
      self
    end

    ##
    # Builds a new resource (not saved) and appends it to the collection.
    #
    # @param [Hash] attributes attributes the new resource should have
    # @api public
    def build(attributes = {})
      repository.scope do
        resource = model.new(default_attributes.merge(attributes))
        self << resource
        resource
      end
    end

    ##
    # Creates (saves) a new resource and appends it to the collection if
    # the save succeeded.
    #
    # @param [Hash] attributes attributes the new resource should have
    # @api public
    def create(attributes = {})
      repository.scope do
        resource = model.create(default_attributes.merge(attributes))
        self << resource unless resource.new_record?
        resource
      end
    end

    def update(attributes = {}, preload = false)
      raise NotImplementedError, 'update *with* validations has not been written yet, try update!'
    end

    ##
    # Batch-updates the entries belonging to this collection, skipping
    # validations for all resources.
    #
    # @example Reached the Age of Alcohol Consumption
    #   Person.all(:age.gte => 21).update!(:allow_beer => true)
    #
    # @param [Hash] attributes attributes to update
    # @param [TrueClass, FalseClass] reload when true, loaded resources
    #   reflect the updates afterwards
    # @return [TrueClass, FalseClass] whether all entries were affected
    # @api public
    def update!(attributes = {}, reload = false)
      # TODO: delegate to Model.update
      return true if attributes.empty?

      dirty_attributes = {}

      model.properties(repository.name).slice(*attributes.keys).each do |property|
        dirty_attributes[property] = attributes[property.name] if property
      end

      # this should never be done on update! even if collection is loaded. or?
      # each { |resource| resource.attributes = attributes } if loaded?

      changes = repository.update(dirty_attributes, scoped_query)

      # need to decide if this should be done in update!
      query.update(attributes)

      if identity_map.any? && reload
        reload_query = @key_properties.zip(identity_map.keys.transpose).to_hash
        model.all(reload_query.merge(attributes)).reload(:fields => attributes.keys)
      end

      # since validations are skipped, every matching row should have changed;
      # fewer changes can only mean some resources already had these values
      loaded? ? changes == size : changes > 0
    end

    def destroy
      raise NotImplementedError, 'destroy *with* validations has not been written yet, try destroy!'
    end

    ##
    # Batch-destroys the entries belonging to this collection, skipping
    # validations for all resources.
    #
    # @example The War On Terror (if only it were this easy)
    #   Person.all(:terrorist => true).destroy() #
    #
    # @return [TrueClass, FalseClass] whether all entries were affected
    # @api public
    def destroy!
      # TODO: delegate to Model.destroy
      if loaded?
        return false unless repository.delete(scoped_query) == size

        # reset each loaded resource to a new, clean record
        each do |resource|
          resource.instance_variable_set(:@new_record, true)
          identity_map.delete(resource.key)
          resource.dirty_attributes.clear

          model.properties(repository.name).each do |property|
            next unless resource.attribute_loaded?(property.name)
            resource.dirty_attributes[property] = property.get(resource)
          end
        end
      else
        return false unless repository.delete(scoped_query) > 0
      end

      clear

      true
    end

    ##
    # @return [DataMapper::PropertySet] the properties this query retrieves
    # @api public
    def properties
      PropertySet.new(query.fields)
    end

    ##
    # @return [Hash] the model's relationships in this repository
    # @api public
    def relationships
      model.relationships(repository.name)
    end

    ##
    # Default attribute values for resources created within this collection,
    # derived from the query's :eql conditions (excluding key properties
    # and Array/Range matches).
    #
    # @return [Hash] defaults for DataMapper::Collection#create / #build
    # @api public
    def default_attributes
      default_attributes = {}
      query.conditions.each do |tuple|
        operator, property, bind_value = *tuple

        # BUG FIX: the key-property check used to be a free-standing
        # statement (missing '&&'), so it never filtered anything and key
        # properties leaked into the default attributes.
        next unless operator == :eql &&
          property.kind_of?(DataMapper::Property) &&
          ![ Array, Range ].any? { |k| bind_value.kind_of?(k) } &&
          !@key_properties.include?(property)

        default_attributes[property.name] = bind_value
      end
      default_attributes
    end

    ##
    # True when the collection, the model's class methods, or the model's
    # relationships can respond to +method+.
    #
    # @api public
    def respond_to?(method, include_private = false)
      super || model.public_methods(false).include?(method.to_s) || relationships.has_key?(method)
    end

    protected

    ##
    # @api private
    def model
      query.model
    end

    private

    ##
    # @api public
    def initialize(query, &block)
      assert_kind_of 'query', query, Query

      unless block_given?
        # It can be helpful (relationship.rb: 112-13, used for SEL) to have a
        # non-lazy Collection.
        block = lambda {}
      end

      @query          = query
      @key_properties = model.key(repository.name)
      @cache          = {}

      super()

      load_with(&block)
    end

    ##
    # Appends (or prepends, for reversed queries) a loaded resource.
    # @api private
    def add(resource)
      query.add_reversed? ? unshift(resource) : push(resource)
      resource
    end

    ##
    # Associates +resource+ with this collection and indexes it by key.
    # @api private
    def relate_resource(resource)
      return unless resource
      resource.collection = self
      @cache[resource.key] = resource
      resource
    end

    ##
    # Detaches +resource+ from this collection and drops its key index.
    # @api private
    def orphan_resource(resource)
      return unless resource
      resource.collection = nil if resource.collection.object_id == self.object_id
      @cache.delete(resource.key)
      resource
    end

    ##
    # Merges +query+ into this collection's query, translating limits and
    # offsets to be relative to the current window.
    # @api private
    def scoped_query(query = self.query)
      assert_kind_of 'query', query, Query, Hash

      query.update(keys) if loaded?

      return self.query if query == self.query

      query = if query.kind_of?(Hash)
        Query.new(query.has_key?(:repository) ? query.delete(:repository) : self.repository, model, query)
      else
        query
      end

      if query.limit || query.offset > 0
        set_relative_position(query)
      end

      self.query.merge(query)
    end

    ##
    # Conditions pinning the collection to its currently loaded keys.
    # @api private
    def keys
      keys = map { |resource| resource.key }
      keys.any? ? @key_properties.zip(keys.transpose).to_hash : {}
    end

    ##
    # @api private
    def identity_map
      repository.identity_map(model)
    end

    ##
    # Rewrites +query+'s offset/limit to be absolute positions within this
    # collection's own window; raises when the requested window falls
    # entirely outside it.
    # @api private
    def set_relative_position(query)
      return if query == self.query

      if query.offset == 0
        return if !query.limit.nil? && !self.query.limit.nil? && query.limit <= self.query.limit
        return if query.limit.nil? && self.query.limit.nil?
      end

      first_pos = self.query.offset + query.offset
      last_pos  = self.query.offset + self.query.limit if self.query.limit

      if limit = query.limit
        if last_pos.nil? || first_pos + limit < last_pos
          last_pos = first_pos + limit
        end
      end

      if last_pos && first_pos >= last_pos
        raise 'outside range' # TODO: raise a proper exception object
      end

      query.update(:offset => first_pos)
      query.update(:limit => last_pos - first_pos) if last_pos
    end

    ##
    # Forwards unknown methods to the model (scoped by this query) or to a
    # relationship lookup across the collection.
    # @api private
    def method_missing(method, *args, &block)
      if model.public_methods(false).include?(method.to_s)
        model.send(:with_scope, query) do
          model.send(method, *args, &block)
        end
      elsif relationship = relationships[method]
        klass = model == relationship.child_model ? relationship.parent_model : relationship.child_model

        # TODO: when self.query includes an offset/limit use it as a
        # subquery to scope the results rather than a join

        query = Query.new(repository, klass)
        query.conditions.push(*self.query.conditions)
        query.update(relationship.query)
        query.update(args.pop) if args.last.kind_of?(Hash)

        query.update(
          :fields => klass.properties(repository.name).defaults,
          :links  => [ relationship ] + self.query.links
        )

        klass.all(query, &block)
      else
        super
      end
    end
  end # class Collection
end # module DataMapper

module DataMapper
  ##
  # Stores callbacks keyed by class name for classes which may or may not be
  # loaded yet; each callback fires once when its class becomes resolvable.
  class DependencyQueue
    def initialize
      @dependencies = {}
    end

    # Registers +callback+ to run once the class named +class_name+ is
    # defined, then attempts resolution immediately.
    def add(class_name, &callback)
      (@dependencies[class_name] ||= []) << callback
      resolve!
    end

    # Runs and clears callbacks for every class name that can currently be
    # resolved; unresolvable names are retried on the next call.
    def resolve!
      @dependencies.each do |class_name, callbacks|
        begin
          klass = Object.find_const(class_name)
          callbacks.each do |callback|
            callback.call(klass)
          end
          callbacks.clear
        rescue NameError
          # class not defined yet — deliberately retried on a later resolve!
        end
      end
    end
  end # class DependencyQueue
end # module DataMapper
module DataMapper
  ##
  # Registers Extlib lifecycle hooks (:save, :create, :update, :destroy) on
  # every class that includes DataMapper::Resource.
  module Hook
    def self.included(model)
      model.class_eval <<-EOS, __FILE__, __LINE__
        include Extlib::Hook
        register_instance_hooks :save, :create, :update, :destroy
      EOS
    end
  end
  DataMapper::Resource.append_inclusions Hook
end # module DataMapper

module DataMapper

  # Tracks objects to help ensure that each object gets loaded only once.
  # See: http://www.martinfowler.com/eaaCatalog/identityMap.html
  class IdentityMap
    # Get a resource from the IdentityMap, falling back to the optional
    # second-level cache.
    def get(key)
      @cache[key] || (@second_level_cache && @second_level_cache.get(key))
    end

    alias [] get

    # Add a resource to the IdentityMap (and the second-level cache, if any).
    def set(key, resource)
      @second_level_cache.set(key, resource) if @second_level_cache
      @cache[key] = resource
    end

    alias []= set

    # Remove a resource from the IdentityMap (and the second-level cache,
    # if any).
    def delete(key)
      @second_level_cache.delete(key) if @second_level_cache
      @cache.delete(key)
    end

    private

    def initialize(second_level_cache = nil)
      @cache = {}
      @second_level_cache = second_level_cache
    end

    def cache
      @cache
    end

    # Delegate any other message (each, size, ...) to the first-level Hash.
    def method_missing(method, *args, &block)
      cache.__send__(method, *args, &block)
    end

    # FIX: pair method_missing with respond_to_missing? so respond_to?
    # accurately reflects the delegated Hash API.
    def respond_to_missing?(method, include_private = false)
      cache.respond_to?(method, include_private) || super
    end
  end # class IdentityMap
end # module DataMapper

module DataMapper
  module Is

    # Entry point for `is :plugin, ...` extensions; dispatches to the
    # plugin's is_<plugin> generator method.
    #
    # @raise [PluginNotFoundError] when no is_<plugin> method is defined
    def is(plugin, *pars, &block)
      generator_method = "is_#{plugin}".to_sym

      if self.respond_to?(generator_method)
        self.send(generator_method, *pars, &block)
      else
        raise PluginNotFoundError, "could not find plugin named #{plugin}"
      end
    end

    Model.send(:include, self)
  end # module Is
end # module DataMapper
require "time" # httpdate

# ==== Public DataMapper Logger API (taken from Merb)
#
# Replace an existing logger:   DataMapper.logger.set_log(log, level)
# Levels: :off, :fatal, :error, :warn, :info, :debug
# Log via:                      DataMapper.logger.<level>(message)
# Flush the buffer:             DataMapper.logger.flush
# Remove the current log:       DataMapper.logger.close
#
# ==== Private DataMapper Logger API
#
# Initializing the logger proxies to set_log:
#   DataMapper::Logger.new(log{String, IO}, level{Symbol, String})
#
# The logger will not create the file until something is actually logged;
# this avoids file creation when DataMapper builds its default logger.
module DataMapper

  class << self #:nodoc:
    attr_accessor :logger
  end

  class Logger

    attr_accessor :aio
    attr_accessor :delimiter
    attr_reader :level
    attr_reader :buffer
    attr_reader :log

    # Severity thresholds (Ruby standard logger levels):
    #   off:   absolutely nothing
    #   fatal: an unhandleable error that results in a program crash
    #   error: a handleable error condition
    #   warn:  a warning
    #   info:  generic (useful) information about system operation
    #   debug: low-level information for developers
    LEVELS = {
      :off   => 99999,
      :fatal => 7,
      :error => 6,
      :warn  => 4,
      :info  => 3,
      :debug => 0
    }

    def level=(new_level)
      @level = LEVELS[new_level.to_sym]
      reset_methods(:close)
    end

    private

    # Instead of branching on every log call, pick the write strategy once
    # when the log object is set up.
    def set_write_method
      @log.instance_eval do

        # Determine if asynchronous IO can be used
        def aio?
          @aio = !RUBY_PLATFORM.match(/java|mswin/) &&
            !(@log == STDOUT) &&
            @log.respond_to?(:write_nonblock)
        end

        # Define the write method based on whether aio can be used
        undef write_method if defined? write_method
        if aio?
          alias :write_method :write_nonblock
        else
          alias :write_method :write
        end
      end
    end

    def initialize_log(log)
      close if @log # be sure that we don't leave open files laying around.
      @log = log || "log/dm.log"
    end

    # Swap the active push implementation depending on whether the log sink
    # has been opened yet.
    def reset_methods(o_or_c)
      if o_or_c == :open
        alias internal_push push_opened
      elsif o_or_c == :close
        alias internal_push push_closed
      end
    end

    def push_opened(string)
      message = Time.now.httpdate
      message << delimiter << string
      message << "\n" unless message[-1] == ?\n
      @buffer << message
      flush # Force a flush for now until we figure out where we want to use the buffering.
    end

    # First push: lazily open the log file, then delegate to the opened path.
    def push_closed(string)
      unless @log.respond_to?(:write)
        log = Pathname(@log)
        log.dirname.mkpath
        @log = log.open('a')
        @log.sync = true
      end
      set_write_method
      reset_methods(:open)
      push(string)
    end

    alias internal_push push_closed

    def prep_msg(message, level)
      level << delimiter << message
    end

    public

    # To initialize the logger you create a new object; proxies to set_log.
    #
    # @param log either an IO object or the name of a logfile
    # @param log_level the logging level (see LEVELS)
    # @param delimiter delimiter between message sections
    # @param log_creation log that the file is being created
    def initialize(*args)
      set_log(*args)
    end

    # Replace an existing logger with a new one.
    #
    # @param log either an IO object or the name of a logfile
    # @param log_level a symbol from {:off, :fatal, :error, :warn, :info, :debug}
    # @param delimiter delimiter between message sections
    # @param log_creation log that the file is being created
    def set_log(log, log_level = :off, delimiter = " ~ ", log_creation = false)
      delimiter ||= " ~ "

      # fall back to :debug for unknown / missing levels
      self.level = (log_level && LEVELS[log_level.to_sym]) ? log_level.to_sym : :debug

      @buffer    = []
      @delimiter = delimiter

      initialize_log(log)

      DataMapper.logger = self

      info("Logfile created") if log_creation
    end

    # Flush the entire buffer to the log object.
    def flush
      return if @buffer.empty?
      @log.write_method(@buffer.slice!(0..-1).to_s)
    end

    # Close and remove the current log object.
    def close
      flush
      @log.close if @log.respond_to?(:close)
      @log = nil
    end

    # Appends a string to the logger's buffer.
    #
    # @note the string is discarded if its log level is below the logger's
    #   level (see the generated level methods); when the sink is aio-capable
    #   writes are non-blocking.
    def push(string)
      internal_push(string)
    end
    alias << push

    # Generate fatal/error/warn/info/debug writer methods and the matching
    # `<level>?` predicates (:off only gets an off? predicate).
    LEVELS.each_pair do |name, number|
      unless name.to_s == 'off'
        class_eval <<-EOS, __FILE__, __LINE__
          # DOC
          def #{name}(message)
            self.<<( prep_msg(message, "#{name}") ) if #{name}?
          end
        EOS
      end

      class_eval <<-EOS, __FILE__, __LINE__
        # DOC
        def #{name}?
          #{number} >= level
        end
      EOS
    end

  end # class Logger
end # module DataMapper

# TODO: move to dm-more/dm-migrations

module DataMapper
  # Base class for migrators; subclasses register themselves (and their
  # models) automatically via inherited.
  class Migrator
    def self.subclasses
      @@subclasses ||= []
    end

    def self.subclasses=(obj)
      @@subclasses = obj
    end

    def self.inherited(klass)
      subclasses << klass

      # give every subclass its own model registry
      class << klass
        def models
          @models ||= []
        end
      end
    end

    # Runs migrate on every registered migrator subclass.
    def self.migrate(repository_name)
      subclasses.collect { |migrator| migrator.migrate(repository_name) }.flatten
    end
  end # class Migrator
end # module DataMapper

# TODO: move to dm-more/dm-migrations

module DataMapper
  # Mix into a model to register it for destructive (drop & recreate)
  # migration via DestructiveMigrator.
  module DestructiveMigrations
    def self.included(model)
      DestructiveMigrator.models << model
    end
  end # module DestructiveMigrations

  # Migrator that destructively auto_migrate!s every registered model.
  class DestructiveMigrator < Migrator
    def self.migrate(repository_name)
      models.each { |model| model.auto_migrate! }
    end
  end # class DestructiveMigrator
end # module DataMapper
+ # + # @param [Module] extensions the module that is to be extend the model after + # after DataMapper::Model + # + # @return [TrueClass, FalseClass] whether or not the inclusions have been + # successfully appended to the list + #- + # @api public + # + # TODO: Move this do DataMapper::Model when DataMapper::Model is created + def self.append_extensions(*extensions) + extra_extensions.concat extensions + true + end + + def self.extra_extensions + @extra_extensions ||= [] + end + + def self.extended(model) + model.instance_variable_set(:@storage_names, {}) + model.instance_variable_set(:@properties, {}) + model.instance_variable_set(:@field_naming_conventions, {}) + extra_extensions.each { |extension| model.extend(extension) } + end + + def inherited(target) + target.instance_variable_set(:@storage_names, @storage_names.dup) + target.instance_variable_set(:@properties, {}) + target.instance_variable_set(:@base_model, self.base_model) + target.instance_variable_set(:@paranoid_properties, @paranoid_properties) + target.instance_variable_set(:@field_naming_conventions, @field_naming_conventions.dup) + + if self.respond_to?(:validators) + @validations.contexts.each do |context, validators| + validators.each { |validator| target.validators.context(context) << validator } + end + end + + @properties.each do |repository_name,properties| + repository(repository_name) do + properties.each do |property| + next if target.properties(repository_name).has_property?(property.name) + target.property(property.name, property.type, property.options.dup) + end + end + end + + if @relationships + duped_relationships = {} + @relationships.each do |repository_name,relationships| + relationships.each do |name, relationship| + dup = relationship.dup + dup.instance_variable_set(:@child_model, target) if dup.instance_variable_get(:@child_model) == self + dup.instance_variable_set(:@parent_model, target) if dup.instance_variable_get(:@parent_model) == self + 
duped_relationships[repository_name] ||= {} + duped_relationships[repository_name][name] = dup + end + end + target.instance_variable_set(:@relationships, duped_relationships) + end + end + + def self.new(storage_name, &block) + model = Class.new + model.send(:include, Resource) + model.class_eval <<-EOS, __FILE__, __LINE__ + def self.default_storage_name + #{Extlib::Inflection.classify(storage_name).inspect} + end + EOS + model.instance_eval(&block) if block_given? + model + end + + def base_model + @base_model ||= self + end + + def repository_name + Repository.context.any? ? Repository.context.last.name : default_repository_name + end + + ## + # Get the repository with a given name, or the default one for the current + # context, or the default one for this class. + # + # @param name the name of the repository wanted + # @param block block to execute with the fetched repository as parameter + # + # @return whatever the block returns, + # if given a block, otherwise the requested repository. + #- + # @api public + def repository(name = nil) + # + # There has been a couple of different strategies here, but me (zond) and dkubb are at least + # united in the concept of explicitness over implicitness. That is - the explicit wish of the + # caller (+name+) should be given more priority than the implicit wish of the caller (Repository.context.last). + # + if block_given? + DataMapper.repository(name || repository_name) { |*block_args| yield(*block_args) } + else + DataMapper.repository(name || repository_name) + end + end + + ## + # the name of the storage recepticle for this resource. IE. 
table name, for database stores + # + # @return the storage name (IE table name, for database stores) associated with this resource in the given repository + def storage_name(repository_name = default_repository_name) + @storage_names[repository_name] ||= repository(repository_name).adapter.resource_naming_convention.call(base_model.send(:default_storage_name)) + end + + ## + # the names of the storage recepticles for this resource across all repositories + # + # @return String)> All available names of storage recepticles + def storage_names + @storage_names + end + + ## + # The field naming conventions for this resource across all repositories. + # + # @return The naming convention for the given repository + def field_naming_convention(repository_name = default_storage_name) + @field_naming_conventions[repository_name] ||= repository(repository_name).adapter.field_naming_convention + end + + ## + # defines a property on the resource + # + # @param name the name for which to call this property + # @param type the type to define this property ass + # @param String)> options a hash of available options + # @see DataMapper::Property + def property(name, type, options = {}) + property = Property.new(self, name, type, options) + + create_property_getter(property) + create_property_setter(property) + + properties(repository_name)[property.name] = property + @_valid_relations = false + + # Add property to the other mappings as well if this is for the default + # repository. + if repository_name == default_repository_name + @properties.each_pair do |repository_name, properties| + next if repository_name == default_repository_name + properties << property unless properties.has_property?(property.name) + end + end + + # Add the property to the lazy_loads set for this resources repository + # only. + # TODO Is this right or should we add the lazy contexts to all + # repositories? + if property.lazy? 
+ context = options.fetch(:lazy, :default) + context = :default if context == true + + Array(context).each do |item| + properties(repository_name).lazy_context(item) << name + end + end + + # add the property to the child classes only if the property was + # added after the child classes' properties have been copied from + # the parent + if respond_to?(:descendants) + descendants.each do |model| + next if model.properties(repository_name).has_property?(name) + model.property(name, type, options) + end + end + + property + end + + def repositories + [ repository ].to_set + @properties.keys.collect { |repository_name| DataMapper.repository(repository_name) } + end + + def properties(repository_name = default_repository_name) + # We need to check whether all relations are already set up. + # If this isn't the case, we try to reload them here + if !@_valid_relations && respond_to?(:many_to_one_relationships) + @_valid_relations = true + begin + many_to_one_relationships.each do |r| + r.child_key + end + rescue NameError + # Apparently not all relations are loaded, + # so we will try again later on + @_valid_relations = false + end + end + @properties[repository_name] ||= repository_name == Repository.default_name ? PropertySet.new : properties(Repository.default_name).dup + end + + def eager_properties(repository_name = default_repository_name) + properties(repository_name).defaults + end + + # @api private + def properties_with_subclasses(repository_name = default_repository_name) + properties = PropertySet.new + ([ self ].to_set + (respond_to?(:descendants) ? 
descendants : [])).each do |model| + model.relationships(repository_name).each_value { |relationship| relationship.child_key } + model.many_to_one_relationships.each do |relationship| relationship.child_key end + model.properties(repository_name).each do |property| + properties << property unless properties.has_property?(property.name) + end + end + properties + end + + def key(repository_name = default_repository_name) + properties(repository_name).key + end + + def default_order(repository_name = default_repository_name) + @default_order ||= {} + @default_order[repository_name] ||= key(repository_name).map { |property| Query::Direction.new(property) } + end + + def get(*key) + key = typecast_key(key) + repository.identity_map(self).get(key) || first(to_query(repository, key)) + end + + def get!(*key) + get(*key) || raise(ObjectNotFoundError, "Could not find #{self.name} with key #{key.inspect}") + end + + def all(query = {}) + query = scoped_query(query) + query.repository.read_many(query) + end + + def first(*args) + query = args.last.respond_to?(:merge) ? args.pop : {} + query = scoped_query(query.merge(:limit => args.first || 1)) + + if args.any? + query.repository.read_many(query) + else + query.repository.read_one(query) + end + end + + def [](*key) + warn("#{name}[] is deprecated. Use #{name}.get! 
instead.") + get!(*key) + end + + def first_or_create(query, attributes = {}) + first(query) || begin + resource = allocate + query = query.dup + + properties(repository_name).key.each do |property| + if value = query.delete(property.name) + resource.send("#{property.name}=", value) + end + end + + resource.attributes = query.merge(attributes) + resource.save + resource + end + end + + ## + # Create an instance of Resource with the given attributes + # + # @param Object)> attributes hash of attributes to set + def create(attributes = {}) + resource = new(attributes) + resource.save + resource + end + + ## + # This method is deprecated, and will be removed from dm-core. + # + def create!(attributes = {}) + warn("Model#create! is deprecated. It is moving to dm-validations, and will be used to create a record without validations") + resource = create(attributes) + raise PersistenceError, "Resource not saved: :new_record => #{resource.new_record?}, :dirty_attributes => #{resource.dirty_attributes.inspect}" if resource.new_record? + resource + end + + # TODO SPEC + def copy(source, destination, query = {}) + repository(destination) do + repository(source).read_many(scoped_query(query)).each do |resource| + self.create(resource.attributes) + end + end + end + + # @api private + # TODO: spec this + def load(values, query) + repository = query.repository + model = self + + if inheritance_property_index = query.inheritance_property_index + model = values.at(inheritance_property_index) || model + end + + if key_property_indexes = query.key_property_indexes(repository) + key_values = values.values_at(*key_property_indexes) + identity_map = repository.identity_map(model) + + if resource = identity_map.get(key_values) + return resource unless query.reload? + else + resource = model.allocate + resource.instance_variable_set(:@repository, repository) + identity_map.set(key_values, resource) + end + else + resource = model.allocate + resource.readonly! 
+ end + + resource.instance_variable_set(:@new_record, false) + + query.fields.zip(values) do |property,value| + value = property.custom? ? property.type.load(value, property) : property.typecast(value) + property.set!(resource, value) + + if track = property.track + case track + when :hash + resource.original_values[property.name] = value.dup.hash unless resource.original_values.has_key?(property.name) rescue value.hash + when :load + resource.original_values[property.name] = value unless resource.original_values.has_key?(property.name) + end + end + end + + resource + end + + # TODO: spec this + def to_query(repository, key, query = {}) + conditions = Hash[ *self.key(repository_name).zip(key).flatten ] + Query.new(repository, self, query.merge(conditions)) + end + + def typecast_key(key) + self.key(repository_name).zip(key).map { |k, v| k.typecast(v) } + end + + def default_repository_name + Repository.default_name + end + + def paranoid_properties + @paranoid_properties ||= {} + @paranoid_properties + end + + private + + def default_storage_name + self.name + end + + def scoped_query(query = self.query) + assert_kind_of 'query', query, Query, Hash + + return self.query if query == self.query + + query = if query.kind_of?(Hash) + Query.new(query.has_key?(:repository) ? 
query.delete(:repository) : self.repository, self, query) + else + query + end + + if self.query + self.query.merge(query) + else + merge_with_default_scope(query) + end + end + + def set_paranoid_property(name, &block) + self.paranoid_properties[name] = block + end + + # defines the getter for the property + def create_property_getter(property) + class_eval <<-EOS, __FILE__, __LINE__ + #{property.reader_visibility} + def #{property.getter} + attribute_get(#{property.name.inspect}) + end + EOS + + if property.primitive == TrueClass && !instance_methods.include?(property.name.to_s) + class_eval <<-EOS, __FILE__, __LINE__ + #{property.reader_visibility} + alias #{property.name} #{property.getter} + EOS + end + end + + # defines the setter for the property + def create_property_setter(property) + unless instance_methods.include?("#{property.name}=") + class_eval <<-EOS, __FILE__, __LINE__ + #{property.writer_visibility} + def #{property.name}=(value) + attribute_set(#{property.name.inspect}, value) + end + EOS + end + end + + def relationships(*args) + # DO NOT REMOVE! + # method_missing depends on these existing. Without this stub, + # a missing module can cause misleading recursive errors. + raise NotImplementedError.new + end + + def method_missing(method, *args, &block) + if relationship = self.relationships(repository_name)[method] + klass = self == relationship.child_model ? 
relationship.parent_model : relationship.child_model + return DataMapper::Query::Path.new(repository, [ relationship ], klass) + end + + if property = properties(repository_name)[method] + return property + end + + super + end + + # TODO: move to dm-more/dm-transactions + module Transaction + # + # Produce a new Transaction for this Resource class + # + # @return false + # # Cannot be null + # property :publish, TrueClass, :default => false + # # Default value for new records is false + # end + # + # By default, DataMapper supports the following primitive types: + # + # * TrueClass, Boolean + # * String + # * Text (limit of 65k characters by default) + # * Float + # * Integer + # * BigDecimal + # * DateTime + # * Date + # * Time + # * Object (marshalled out during serialization) + # * Class (datastore primitive is the same as String. Used for Inheritance) + # + # For more information about available Types, see DataMapper::Type + # + # == Limiting Access + # Property access control is uses the same terminology Ruby does. Properties + # are public by default, but can also be declared private or protected as + # needed (via the :accessor option). + # + # class Post + # include DataMapper::Resource + # property :title, String, :accessor => :private + # # Both reader and writer are private + # property :body, Text, :accessor => :protected + # # Both reader and writer are protected + # end + # + # Access control is also analogous to Ruby accessors and mutators, and can + # be declared using :reader and :writer, in addition to :accessor. + # + # class Post + # include DataMapper::Resource + # + # property :title, String, :writer => :private + # # Only writer is private + # + # property :tags, String, :reader => :protected + # # Only reader is protected + # end + # + # == Overriding Accessors + # The accessor for any property can be overridden in the same manner that Ruby + # class accessors can be. 
After the property is defined, just add your custom + # accessor: + # + # class Post + # include DataMapper::Resource + # property :title, String + # + # def title=(new_title) + # raise ArgumentError if new_title != 'Luke is Awesome' + # @title = new_title + # end + # end + # + # == Lazy Loading + # By default, some properties are not loaded when an object is fetched in + # DataMapper. These lazily loaded properties are fetched on demand when their + # accessor is called for the first time (as it is often unnecessary to + # instantiate -every- property -every- time an object is loaded). For + # instance, DataMapper::Types::Text fields are lazy loading by default, + # although you can over-ride this behavior if you wish: + # + # Example: + # + # class Post + # include DataMapper::Resource + # property :title, String # Loads normally + # property :body, DataMapper::Types::Text # Is lazily loaded by default + # end + # + # If you want to over-ride the lazy loading on any field you can set it to a + # context or false to disable it with the :lazy option. Contexts allow + # multipule lazy properties to be loaded at one time. If you set :lazy to + # true, it is placed in the :default context + # + # class Post + # include DataMapper::Resource + # + # property :title, String + # # Loads normally + # + # property :body, DataMapper::Types::Text, :lazy => false + # # The default is now over-ridden + # + # property :comment, String, lazy => [:detailed] + # # Loads in the :detailed context + # + # property :author, String, lazy => [:summary,:detailed] + # # Loads in :summary & :detailed context + # end + # + # Delaying the request for lazy-loaded attributes even applies to objects + # accessed through associations. In a sense, DataMapper anticipates that + # you will likely be iterating over objects in associations and rolls all + # of the load commands for lazy-loaded properties into one request from + # the database. 
+ # + # Example: + # + # Widget[1].components + # # loads when the post object is pulled from database, by default + # + # Widget[1].components.first.body + # # loads the values for the body property on all objects in the + # # association, rather than just this one. + # + # Widget[1].components.first.comment + # # loads both comment and author for all objects in the association + # # since they are both in the :detailed context + # + # == Keys + # Properties can be declared as primary or natural keys on a table. + # You should a property as the primary key of the table: + # + # Examples: + # + # property :id, Serial # auto-incrementing key + # property :legacy_pk, String, :key => true # 'natural' key + # + # This is roughly equivalent to ActiveRecord's set_primary_key, + # though non-integer data types may be used, thus DataMapper supports natural + # keys. When a property is declared as a natural key, accessing the object + # using the indexer syntax Class[key] remains valid. + # + # User[1] + # # when :id is the primary key on the users table + # User['bill'] + # # when :name is the primary (natural) key on the users table + # + # == Indeces + # You can add indeces for your properties by using the :index + # option. If you use true as the option value, the index will be + # automatically named. If you want to name the index yourself, use a symbol + # as the value. + # + # property :last_name, String, :index => true + # property :first_name, String, :index => :name + # + # You can create multi-column composite indeces by using the same symbol in + # all the columns belonging to the index. The columns will appear in the + # index in the order they are declared. 
+ # + # property :last_name, String, :index => :name + # property :first_name, String, :index => :name + # # => index on (last_name, first_name) + # + # If you want to make the indeces unique, use :unique_index instead + # of :index + # + # == Inferred Validations + # If you require the dm-validations plugin, auto-validations will + # automatically be mixed-in in to your model classes: + # validation rules that are inferred when properties are declared with + # specific column restrictions. + # + # class Post + # include DataMapper::Resource + # + # property :title, String, :length => 250 + # # => infers 'validates_length :title, + # :minimum => 0, :maximum => 250' + # + # property :title, String, :nullable => false + # # => infers 'validates_present :title + # + # property :email, String, :format => :email_address + # # => infers 'validates_format :email, :with => :email_address + # + # property :title, String, :length => 255, :nullable => false + # # => infers both 'validates_length' as well as + # # 'validates_present' + # # better: property :title, String, :length => 1..255 + # + # end + # + # This functionality is available with the dm-validations gem, part of the + # dm-more bundle. For more information about validations, check the + # documentation for dm-validations. + # + # == Default Values + # To set a default for a property, use the :default key. The + # property will be set to the value associated with that key the first time + # it is accessed, or when the resource is saved if it hasn't been set with + # another value already. This value can be a static value, such as 'hello' + # but it can also be a proc that will be evaluated when the property is read + # before its value has been set. The property is set to the return of the + # proc. The proc is passed two values, the resource the property is being set + # for and the property itself. + # + # property :display_name, String, :default => { |r, p| r.login } + # + # Word of warning. 
Don't try to read the value of the property you're setting + # the default for in the proc. An infinite loop will ensue. + # + # == Embedded Values + # As an alternative to extraneous has_one relationships, consider using an + # EmbeddedValue. + # + # == Misc. Notes + # * Properties declared as strings will default to a length of 50, rather than + # 255 (typical max varchar column size). To overload the default, pass + # :length => 255 or :length => 0..255. Since DataMapper + # does not introspect for properties, this means that legacy database tables + # may need their String columns defined with a :length so + # that DM does not apply an un-needed length validation, or allow overflow. + # * You may declare a Property with the data-type of Class. + # see SingleTableInheritance for more on how to use Class columns. + class Property + include Assertions + + # NOTE: check is only for psql, so maybe the postgres adapter should + # define its own property options. currently it will produce a warning tho + # since PROPERTY_OPTIONS is a constant + # + # NOTE: PLEASE update PROPERTY_OPTIONS in DataMapper::Type when updating + # them here + PROPERTY_OPTIONS = [ + :accessor, :reader, :writer, + :lazy, :default, :nullable, :key, :serial, :field, :size, :length, + :format, :index, :unique_index, :check, :ordinal, :auto_validation, + :validates, :unique, :track, :precision, :scale + ] + + # FIXME: can we pull the keys from + # DataMapper::Adapters::DataObjectsAdapter::TYPES + # for this? 
+ TYPES = [ + TrueClass, + String, + DataMapper::Types::Text, + Float, + Integer, + BigDecimal, + DateTime, + Date, + Time, + Object, + Class, + DataMapper::Types::Discriminator, + DataMapper::Types::Serial + ] + + IMMUTABLE_TYPES = [ TrueClass, Float, Integer, BigDecimal] + + VISIBILITY_OPTIONS = [ :public, :protected, :private ] + + DEFAULT_LENGTH = 50 + DEFAULT_PRECISION = 10 + DEFAULT_SCALE_BIGDECIMAL = 0 + DEFAULT_SCALE_FLOAT = nil + + attr_reader :primitive, :model, :name, :instance_variable_name, + :type, :reader_visibility, :writer_visibility, :getter, :options, + :default, :precision, :scale, :track, :extra_options + + # Supplies the field in the data-store which the property corresponds to + # + # @return name of field in data-store + # - + # @api semi-public + def field(repository_name = nil) + @field || @fields[repository_name] ||= self.model.field_naming_convention(repository_name).call(self) + end + + def unique + @unique ||= @options.fetch(:unique, @serial || @key || false) + end + + def hash + if @custom && !@bound + @type.bind(self) + @bound = true + end + + return @model.hash + @name.hash + end + + def eql?(o) + if o.is_a?(Property) + return o.model == @model && o.name == @name + else + return false + end + end + + def length + @length.is_a?(Range) ? @length.max : @length + end + alias size length + + def index + @index + end + + def unique_index + @unique_index + end + + # Returns whether or not the property is to be lazy-loaded + # + # @return whether or not the property is to be + # lazy-loaded + # - + # @api public + def lazy? + @lazy + end + + # Returns whether or not the property is a key or a part of a key + # + # @return whether the property is a key or a part of + # a key + #- + # @api public + def key? + @key + end + + # Returns whether or not the property is "serial" (auto-incrementing) + # + # @return whether or not the property is "serial" + #- + # @api public + def serial? 
+ @serial + end + + # Returns whether or not the property can accept 'nil' as it's value + # + # @return whether or not the property can accept 'nil' + #- + # @api public + def nullable? + @nullable + end + + def custom? + @custom + end + + # Provides a standardized getter method for the property + # + # @raise "+resource+ should be a DataMapper::Resource, but was ...." + #- + # @api private + def get(resource) + lazy_load(resource) + + value = get!(resource) + + set_original_value(resource, value) + + # [YK] Why did we previously care whether options[:default] is nil. + # The default value of nil will be applied either way + if value.nil? && resource.new_record? && !resource.attribute_loaded?(name) + value = default_for(resource) + set(resource, value) + end + + value + end + + def get!(resource) + resource.instance_variable_get(instance_variable_name) + end + + def set_original_value(resource, val) + unless resource.original_values.key?(name) + val = val.try_dup + val = val.hash if track == :hash + resource.original_values[name] = val + end + end + + # Provides a standardized setter method for the property + # + # @raise "+resource+ should be a DataMapper::Resource, but was ...." + #- + # @api private + def set(resource, value) + # [YK] We previously checked for new_record? here, but lazy loading + # is blocked anyway if we're in a new record by by + # Resource#reload_attributes. This may eventually be useful for + # optimizing, but let's (a) benchmark it first, and (b) do + # whatever refactoring is necessary, which will benefit from the + # centralize checking + lazy_load(resource) + + new_value = typecast(value) + old_value = get!(resource) + + set_original_value(resource, old_value) + + set!(resource, new_value) + end + + def set!(resource, value) + resource.instance_variable_set(instance_variable_name, value) + end + + # Loads lazy columns when get or set is called. 
+ #- + # @api private + def lazy_load(resource) + # It is faster to bail out at at a new_record? rather than to process + # which properties would be loaded and then not load them. + return if resource.new_record? || resource.attribute_loaded?(name) + # If we're trying to load a lazy property, load it. Otherwise, lazy-load + # any properties that should be eager-loaded but were not included + # in the original :fields list + contexts = lazy? ? name : model.eager_properties(resource.repository.name) + resource.send(:lazy_load, contexts) + end + + # typecasts values into a primitive + # + # @return the primitive data-type, defaults to TrueClass + #- + # @api private + def typecast(value) + return type.typecast(value, self) if type.respond_to?(:typecast) + return value if value.kind_of?(primitive) || value.nil? + begin + if primitive == TrueClass then %w[ true 1 t ].include?(value.to_s.downcase) + elsif primitive == String then value.to_s + elsif primitive == Float then value.to_f + elsif primitive == Integer + # The simplest possible implementation, i.e. value.to_i, is not + # desirable because "junk".to_i gives "0". We want nil instead, + # because this makes it clear that the typecast failed. + # + # After benchmarking, we preferred the current implementation over + # these two alternatives: + # * Integer(value) rescue nil + # * Integer(value_to_s =~ /(\d+)/ ? $1 : value_to_s) rescue nil + # + # [YK] The previous implementation used a rescue. Why use a rescue + # when the list of cases where a valid string other than "0" could + # produce 0 is known? + value_to_i = value.to_i + if value_to_i == 0 + value.to_s =~ /^(0x|0b)?0+/ ? 
0 : nil + else + value_to_i + end + elsif primitive == BigDecimal then BigDecimal(value.to_s) + elsif primitive == DateTime then typecast_to_datetime(value) + elsif primitive == Date then typecast_to_date(value) + elsif primitive == Time then typecast_to_time(value) + elsif primitive == Class then self.class.find_const(value) + else + value + end + rescue + value + end + end + + def default_for(resource) + @default.respond_to?(:call) ? @default.call(resource, self) : @default + end + + def value(val) + custom? ? self.type.dump(val, self) : val + end + + def inspect + "#" + end + + private + + def initialize(model, name, type, options = {}) + assert_kind_of 'model', model, Model + assert_kind_of 'name', name, Symbol + assert_kind_of 'type', type, Class + + if Fixnum == type + # It was decided that Integer is a more expressively names class to + # use instead of Fixnum. Fixnum only represents smaller numbers, + # so there was some confusion over whether or not it would also + # work with Bignum too (it will). Any Integer, which includes + # Fixnum and Bignum, can be stored in this property. + warn "#{type} properties are deprecated. Please use Integer instead" + type = Integer + end + + unless TYPES.include?(type) || (DataMapper::Type > type && TYPES.include?(type.primitive)) + raise ArgumentError, "+type+ was #{type.inspect}, which is not a supported type: #{TYPES * ', '}", caller + end + + @extra_options = {} + (options.keys - PROPERTY_OPTIONS).each do |key| + @extra_options[key] = options.delete(key) + end + + @model = model + @name = name.to_s.sub(/\?$/, '').to_sym + @type = type + @custom = DataMapper::Type > @type + @options = @custom ? @type.options.merge(options) : options + @instance_variable_name = "@#{@name}" + + # TODO: This default should move to a DataMapper::Types::Text + # Custom-Type and out of Property. + @primitive = @options.fetch(:primitive, @type.respond_to?(:primitive) ? @type.primitive : @type) + + @getter = TrueClass == @primitive ? 
"#{@name}?".to_sym : @name + @field = @options.fetch(:field, nil) + @serial = @options.fetch(:serial, false) + @key = @options.fetch(:key, @serial || false) + @default = @options.fetch(:default, nil) + @nullable = @options.fetch(:nullable, @key == false) + @index = @options.fetch(:index, false) + @unique_index = @options.fetch(:unique_index, false) + @lazy = @options.fetch(:lazy, @type.respond_to?(:lazy) ? @type.lazy : false) && !@key + @fields = {} + + @track = @options.fetch(:track) do + if @custom && @type.respond_to?(:track) && @type.track + @type.track + else + IMMUTABLE_TYPES.include?(@primitive) ? :set : :get + end + end + + # assign attributes per-type + if String == @primitive || Class == @primitive + @length = @options.fetch(:length, @options.fetch(:size, DEFAULT_LENGTH)) + elsif BigDecimal == @primitive || Float == @primitive + @precision = @options.fetch(:precision, DEFAULT_PRECISION) + + default_scale = (Float == @primitive) ? DEFAULT_SCALE_FLOAT : DEFAULT_SCALE_BIGDECIMAL + @scale = @options.fetch(:scale, default_scale) + # @scale = @options.fetch(:scale, DEFAULT_SCALE_BIGDECIMAL) + + unless @precision > 0 + raise ArgumentError, "precision must be greater than 0, but was #{@precision.inspect}" + end + + if (BigDecimal == @primitive) || (Float == @primitive && !@scale.nil?) 
+ unless @scale >= 0 + raise ArgumentError, "scale must be equal to or greater than 0, but was #{@scale.inspect}" + end + + unless @precision >= @scale + raise ArgumentError, "precision must be equal to or greater than scale, but was #{@precision.inspect} and scale was #{@scale.inspect}" + end + end + end + + determine_visibility + + @model.auto_generate_validations(self) if @model.respond_to?(:auto_generate_validations) + @model.property_serialization_setup(self) if @model.respond_to?(:property_serialization_setup) + end + + def determine_visibility # :nodoc: + @reader_visibility = @options[:reader] || @options[:accessor] || :public + @writer_visibility = @options[:writer] || @options[:accessor] || :public + + unless VISIBILITY_OPTIONS.include?(@reader_visibility) && VISIBILITY_OPTIONS.include?(@writer_visibility) + raise ArgumentError, 'property visibility must be :public, :protected, or :private', caller(2) + end + end + + # Typecasts an arbitrary value to a DateTime + def typecast_to_datetime(value) + case value + when Hash then typecast_hash_to_datetime(value) + else DateTime.parse(value.to_s) + end + end + + # Typecasts an arbitrary value to a Date + def typecast_to_date(value) + case value + when Hash then typecast_hash_to_date(value) + else Date.parse(value.to_s) + end + end + + # Typecasts an arbitrary value to a Time + def typecast_to_time(value) + case value + when Hash then typecast_hash_to_time(value) + else Time.parse(value.to_s) + end + end + + def typecast_hash_to_datetime(hash) + args = extract_time_args_from_hash(hash, :year, :month, :day, :hour, :min, :sec) + DateTime.new(*args) + rescue ArgumentError => e + t = typecast_hash_to_time(hash) + DateTime.new(t.year, t.month, t.day, t.hour, t.min, t.sec) + end + + def typecast_hash_to_date(hash) + args = extract_time_args_from_hash(hash, :year, :month, :day) + Date.new(*args) + rescue ArgumentError + t = typecast_hash_to_time(hash) + Date.new(t.year, t.month, t.day) + end + + def 
typecast_hash_to_time(hash) + args = extract_time_args_from_hash(hash, :year, :month, :day, :hour, :min, :sec) + Time.local(*args) + end + + # Extracts the given args from the hash. If a value does not exist, it + # uses the value of Time.now + def extract_time_args_from_hash(hash, *args) + now = Time.now + args.map { |arg| hash[arg] || hash[arg.to_s] || now.send(arg) } + end + end # class Property +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/property_set.rb b/vendor/dm-core-0.9.6/lib/dm-core/property_set.rb new file mode 100755 index 0000000..40f269b --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/property_set.rb @@ -0,0 +1,169 @@ +module DataMapper + class PropertySet + include Assertions + include Enumerable + + def [](name) + property_for(name) || raise(ArgumentError, "Unknown property '#{name}'", caller) + end + + def []=(name, property) + @key, @defaults = nil + if existing_property = detect { |p| p.name == name } + property.hash + @entries[@entries.index(existing_property)] = property + else + add(property) + end + property + end + + def has_property?(name) + !!property_for(name) + end + + def slice(*names) + @key, @defaults = nil + names.map do |name| + property_for(name) + end + end + + def clear + @key, @defaults = nil + @entries.clear + end + + def add(*properties) + @key, @defaults = nil + @entries.push(*properties) + properties.each { |property| property.hash } + self + end + + alias << add + + def length + @entries.length + end + + def empty? + @entries.empty? + end + + def each + @entries.each { |property| yield property } + self + end + + def defaults + @defaults ||= reject { |property| property.lazy? } + end + + def key + @key ||= select { |property| property.key? 
} + end + + def indexes + index_hash = {} + repository_name = repository.name + each { |property| parse_index(property.index, property.field(repository_name), index_hash) } + index_hash + end + + def unique_indexes + index_hash = {} + repository_name = repository.name + each { |property| parse_index(property.unique_index, property.field(repository_name), index_hash) } + index_hash + end + + def get(resource) + map { |property| property.get(resource) } + end + + def set(resource, values) + if values.kind_of?(Array) && values.length != length + raise ArgumentError, "+values+ must have a length of #{length}, but has #{values.length}", caller + end + + each_with_index { |property,i| property.set(resource, values.nil? ? nil : values[i]) } + end + + def property_contexts(name) + contexts = [] + lazy_contexts.each do |context,property_names| + contexts << context if property_names.include?(name) + end + contexts + end + + def lazy_context(name) + lazy_contexts[name] ||= [] + end + + def lazy_load_context(names) + if names.kind_of?(Array) && names.empty? + raise ArgumentError, '+names+ cannot be empty', caller + end + + result = [] + + Array(names).each do |name| + contexts = property_contexts(name) + if contexts.empty? 
+ result << name # not lazy + else + result |= lazy_contexts.values_at(*contexts).flatten.uniq + end + end + result + end + + def to_query(bind_values) + Hash[ *zip(bind_values).flatten ] + end + + def inspect + '#' + end + + private + + def initialize(properties = []) + assert_kind_of 'properties', properties, Enumerable + + @entries = properties + @property_for = {} + end + + def initialize_copy(orig) + @key, @defaults = nil + @entries = orig.entries.dup + @property_for = {} + end + + def lazy_contexts + @lazy_contexts ||= {} + end + + def parse_index(index, property, index_hash) + case index + when true then index_hash[property] = [property] + when Symbol + index_hash[index.to_s] ||= [] + index_hash[index.to_s] << property + when Enumerable then index.each { |idx| parse_index(idx, property, index_hash) } + end + end + + def property_for(name) + unless @property_for[name] + property = detect { |property| property.name == name.to_sym } + @property_for[name.to_s] = @property_for[name.to_sym] = property if property + end + @property_for[name] + end + + end # class PropertySet +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/query.rb b/vendor/dm-core-0.9.6/lib/dm-core/query.rb new file mode 100644 index 0000000..f687875 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/query.rb @@ -0,0 +1,627 @@ +module DataMapper + class Query + include Assertions + + OPTIONS = [ + :reload, :offset, :limit, :order, :add_reversed, :fields, :links, :includes, :conditions, :unique + ] + + attr_reader :repository, :model, *OPTIONS - [ :reload, :unique ] + attr_writer :add_reversed + alias add_reversed? add_reversed + + def reload? + @reload + end + + def unique? + @unique + end + + def reverse + dup.reverse! + end + + def reverse! 
+ # reverse the sort order + update(:order => self.order.map { |o| o.reverse }) + + self + end + + def update(other) + assert_kind_of 'other', other, self.class, Hash + + assert_valid_other(other) + + if other.kind_of?(Hash) + return self if other.empty? + other = self.class.new(@repository, model, other) + end + + return self if self == other + + # TODO: update this so if "other" had a value explicitly set + # overwrite the attributes in self + + # only overwrite the attributes with non-default values + @reload = other.reload? unless other.reload? == false + @unique = other.unique? unless other.unique? == false + @offset = other.offset if other.reload? || other.offset != 0 + @limit = other.limit unless other.limit == nil + @order = other.order unless other.order == model.default_order + @add_reversed = other.add_reversed? unless other.add_reversed? == false + @fields = other.fields unless other.fields == @properties.defaults + @links = other.links unless other.links == [] + @includes = other.includes unless other.includes == [] + + update_conditions(other) + + self + end + + def merge(other) + dup.update(other) + end + + def ==(other) + return true if super + return false unless other.kind_of?(self.class) + + # TODO: add a #hash method, and then use it in the comparison, eg: + # return hash == other.hash + @model == other.model && + @reload == other.reload? && + @unique == other.unique? && + @offset == other.offset && + @limit == other.limit && + @order == other.order && # order is significant, so do not sort this + @add_reversed == other.add_reversed? 
&& + @fields == other.fields && # TODO: sort this so even if the order is different, it is equal + @links == other.links && # TODO: sort this so even if the order is different, it is equal + @includes == other.includes && # TODO: sort this so even if the order is different, it is equal + @conditions.sort_by { |c| c.at(0).hash + c.at(1).hash + c.at(2).hash } == other.conditions.sort_by { |c| c.at(0).hash + c.at(1).hash + c.at(2).hash } + end + + alias eql? == + + def bind_values + bind_values = [] + conditions.each do |tuple| + next if tuple.size == 2 + operator, property, bind_value = *tuple + if :raw == operator + bind_values.push(*bind_value) + else + bind_values << bind_value + end + end + bind_values + end + + def inheritance_property + fields.detect { |property| property.type == DataMapper::Types::Discriminator } + end + + def inheritance_property_index + fields.index(inheritance_property) + end + + # TODO: spec this + def key_property_indexes(repository) + if (key_property_indexes = model.key(repository.name).map { |property| fields.index(property) }).all? + key_property_indexes + end + end + + # find the point in self.conditions where the sub select tuple is + # located. Delete the tuple and add value.conditions. value must be a + # + # + def merge_subquery(operator, property, value) + assert_kind_of 'value', value, self.class + + new_conditions = [] + conditions.each do |tuple| + if tuple.at(0).to_s == operator.to_s && tuple.at(1) == property && tuple.at(2) == value + value.conditions.each do |subquery_tuple| + new_conditions << subquery_tuple + end + else + new_conditions << tuple + end + end + @conditions = new_conditions + end + + def inspect + attrs = [ + [ :repository, repository.name ], + [ :model, model ], + [ :fields, fields ], + [ :links, links ], + [ :conditions, conditions ], + [ :order, order ], + [ :limit, limit ], + [ :offset, offset ], + [ :reload, reload? ], + [ :unique, unique? 
], + ] + + "#<#{self.class.name} #{attrs.map { |(k,v)| "@#{k}=#{v.inspect}" } * ' '}>" + end + + private + + def initialize(repository, model, options = {}) + assert_kind_of 'repository', repository, Repository + assert_kind_of 'model', model, Model + assert_kind_of 'options', options, Hash + + options.each_pair { |k,v| options[k] = v.call if v.is_a? Proc } if options.is_a? Hash + + assert_valid_options(options) + + @repository = repository + @properties = model.properties(@repository.name) + + @model = model # must be Class that includes DM::Resource + @reload = options.fetch :reload, false # must be true or false + @unique = options.fetch :unique, false # must be true or false + @offset = options.fetch :offset, 0 # must be an Integer greater than or equal to 0 + @limit = options.fetch :limit, nil # must be an Integer greater than or equal to 1 + @order = options.fetch :order, model.default_order(@repository.name) # must be an Array of Symbol, DM::Query::Direction or DM::Property + @add_reversed = options.fetch :add_reversed, false # must be true or false + @fields = options.fetch :fields, @properties.defaults # must be an Array of Symbol, String or DM::Property + @links = options.fetch :links, [] # must be an Array of Tuples - Tuple [DM::Query,DM::Assoc::Relationship] + @includes = options.fetch :includes, [] # must be an Array of DM::Query::Path + @conditions = [] # must be an Array of triplets (or pairs when passing in raw String queries) + + # normalize order and fields + @order = normalize_order(@order) + @fields = normalize_fields(@fields) + + # XXX: should I validate that each property in @order corresponds + # to something in @fields? Many DB engines require they match, + # and I can think of no valid queries where a field would be so + # important that you sort on it, but not important enough to + # return. + + # normalize links and includes. 
+ # NOTE: this must be done after order and fields + @links = normalize_links(@links) + @includes = normalize_includes(@includes) + + # treat all non-options as conditions + (options.keys - OPTIONS).each do |k| + append_condition(k, options[k]) + end + + # parse raw options[:conditions] differently + if conditions = options[:conditions] + if conditions.kind_of?(Hash) + conditions.each do |k,v| + append_condition(k, v) + end + elsif conditions.kind_of?(Array) + raw_query, *bind_values = conditions + @conditions << if bind_values.empty? + [ :raw, raw_query ] + else + [ :raw, raw_query, bind_values ] + end + end + end + end + + def initialize_copy(original) + # deep-copy the condition tuples when copying the object + @conditions = original.conditions.map { |tuple| tuple.dup } + end + + # validate the options + def assert_valid_options(options) + # [DB] This might look more ugly now, but it's 2x as fast as the old code + # [DB] This is one of the heavy spots for Query.new I found during profiling. + options.each_pair do |attribute, value| + + # validate the reload option and unique option + if [:reload, :unique].include? attribute + if value != true && value != false + raise ArgumentError, "+options[:#{attribute}]+ must be true or false, but was #{value.inspect}", caller(2) + end + + # validate the offset and limit options + elsif [:offset, :limit].include? attribute + assert_kind_of "options[:#{attribute}]", value, Integer + if attribute == :offset && value < 0 + raise ArgumentError, "+options[:offset]+ must be greater than or equal to 0, but was #{value.inspect}", caller(2) + elsif attribute == :limit && value < 1 + raise ArgumentError, "+options[:limit]+ must be greater than or equal to 1, but was #{options[:limit].inspect}", caller(2) + end + + # validate the :order, :fields, :links and :includes options + elsif [ :order, :fields, :links, :includes ].include? attribute + assert_kind_of "options[:#{attribute}]", value, Array + + if value.empty? 
+ if attribute == :fields + if options[:unique] == false + raise ArgumentError, '+options[:fields]+ cannot be empty if +options[:unique] is false', caller(2) + end + elsif attribute == :order + if options[:fields] && options[:fields].any? { |p| !p.kind_of?(Operator) } + raise ArgumentError, '+options[:order]+ cannot be empty if +options[:fields] contains a non-operator', caller(2) + end + else + raise ArgumentError, "+options[:#{attribute}]+ cannot be empty", caller(2) + end + end + + # validates the :conditions option + elsif :conditions == attribute + assert_kind_of 'options[:conditions]', value, Hash, Array + + if value.empty? + raise ArgumentError, '+options[:conditions]+ cannot be empty', caller(2) + end + end + end + end + + # validate other DM::Query or Hash object + def assert_valid_other(other) + return unless other.kind_of?(self.class) + + unless other.repository == repository + raise ArgumentError, "+other+ #{self.class} must be for the #{repository.name} repository, not #{other.repository.name}", caller(2) + end + + unless other.model == model + raise ArgumentError, "+other+ #{self.class} must be for the #{model.name} model, not #{other.model.name}", caller(2) + end + end + + # normalize order elements to DM::Query::Direction + def normalize_order(order) + order.map do |order_by| + case order_by + when Direction + # NOTE: The property is available via order_by.property + # TODO: if the Property's model doesn't match + # self.model, append the property's model to @links + # eg: + #if property.model != self.model + # @links << discover_path_for_property(property) + #end + + order_by + when Property + # TODO: if the Property's model doesn't match + # self.model, append the property's model to @links + # eg: + #if property.model != self.model + # @links << discover_path_for_property(property) + #end + + Direction.new(order_by) + when Operator + property = @properties[order_by.target] + Direction.new(property, order_by.operator) + when Symbol, String + 
property = @properties[order_by] + + if property.nil? + raise ArgumentError, "+options[:order]+ entry #{order_by} does not map to a DataMapper::Property", caller(2) + end + + Direction.new(property) + else + raise ArgumentError, "+options[:order]+ entry #{order_by.inspect} not supported", caller(2) + end + end + end + + # normalize fields to DM::Property + def normalize_fields(fields) + # TODO: return a PropertySet + # TODO: raise an exception if the property is not available in the repository + fields.map do |field| + case field + when Property, Operator + # TODO: if the Property's model doesn't match + # self.model, append the property's model to @links + # eg: + #if property.model != self.model + # @links << discover_path_for_property(property) + #end + field + when Symbol, String + property = @properties[field] + + if property.nil? + raise ArgumentError, "+options[:fields]+ entry #{field} does not map to a DataMapper::Property", caller(2) + end + + property + else + raise ArgumentError, "+options[:fields]+ entry #{field.inspect} not supported", caller(2) + end + end + end + + # normalize links to DM::Query::Path + def normalize_links(links) + # XXX: this should normalize to DM::Query::Path, not DM::Association::Relationship + # because a link may be more than one-hop-away from the source. A DM::Query::Path + # should include an Array of Relationship objects that trace the "path" between + # the source and the target. 
+ links.map do |link| + case link + when Associations::Relationship + link + when Symbol, String + link = link.to_sym if link.kind_of?(String) + + unless model.relationships(@repository.name).has_key?(link) + raise ArgumentError, "+options[:links]+ entry #{link} does not map to a DataMapper::Associations::Relationship", caller(2) + end + + model.relationships(@repository.name)[link] + else + raise ArgumentError, "+options[:links]+ entry #{link.inspect} not supported", caller(2) + end + end + end + + # normalize includes to DM::Query::Path + def normalize_includes(includes) + # TODO: normalize Array of Symbol, String, DM::Property 1-jump-away or DM::Query::Path + # NOTE: :includes can only be and array of DM::Query::Path objects now. This method + # can go away after review of what has been done. + includes + end + + # validate that all the links or includes are present for the given DM::Query::Path + # + def validate_query_path_links(path) + path.relationships.map do |relationship| + @links << relationship unless (@links.include?(relationship) || @includes.include?(relationship)) + end + end + + def append_condition(clause, bind_value) + operator = :eql + bind_value = bind_value.call if bind_value.is_a?(Proc) + + property = case clause + when Property + clause + when Query::Path + validate_query_path_links(clause) + clause + when Operator + operator = clause.operator + return if operator == :not && bind_value == [] + if clause.target.is_a?(Symbol) + @properties[clause.target] + elsif clause.target.is_a?(Query::Path) + validate_query_path_links(clause.target) + clause.target + end + when Symbol + @properties[clause] + when String + if clause =~ /\w\.\w/ + query_path = @model + clause.split(".").each { |piece| query_path = query_path.send(piece) } + append_condition(query_path, bind_value) + return + else + @properties[clause] + end + else + raise ArgumentError, "Condition type #{clause.inspect} not supported", caller(2) + end + + if property.nil? 
+ raise ArgumentError, "Clause #{clause.inspect} does not map to a DataMapper::Property", caller(2) + end + + bind_value = dump_custom_value(property, bind_value) + + @conditions << [ operator, property, bind_value ] + end + + def dump_custom_value(property_or_path, bind_value) + case property_or_path + when DataMapper::Query::Path + dump_custom_value(property_or_path.property, bind_value) + when Property + if property_or_path.custom? + property_or_path.type.dump(bind_value, property_or_path) + else + bind_value + end + else + bind_value + end + end + + # TODO: check for other mutually exclusive operator + property + # combinations. For example if self's conditions were + # [ :gt, :amount, 5 ] and the other's condition is [ :lt, :amount, 2 ] + # there is a conflict. When in conflict the other's conditions + # overwrites self's conditions. + + # TODO: Another condition is when the other condition operator is + # eql, this should over-write all the like,range and list operators + # for the same property, since we are now looking for an exact match. + # Vice versa, passing in eql should overwrite all of those operators. 
+ + def update_conditions(other) + @conditions = @conditions.dup + + # build an index of conditions by the property and operator to + # avoid nested looping + conditions_index = {} + @conditions.each do |condition| + operator, property = *condition + next if :raw == operator + conditions_index[property] ||= {} + conditions_index[property][operator] = condition + end + + # loop over each of the other's conditions, and overwrite the + # conditions when in conflict + other.conditions.each do |other_condition| + other_operator, other_property, other_bind_value = *other_condition + + unless :raw == other_operator + conditions_index[other_property] ||= {} + if condition = conditions_index[other_property][other_operator] + operator, property, bind_value = *condition + + next if bind_value == other_bind_value + + # overwrite the bind value in the existing condition + condition[2] = case operator + when :eql, :like then other_bind_value + when :gt, :gte then [ bind_value, other_bind_value ].min + when :lt, :lte then [ bind_value, other_bind_value ].max + when :not, :in + if bind_value.kind_of?(Array) + bind_value |= other_bind_value + elsif other_bind_value.kind_of?(Array) + other_bind_value |= bind_value + else + other_bind_value + end + end + + next # process the next other condition + end + end + + # otherwise append the other condition + @conditions << other_condition.dup + end + + @conditions + end + + class Direction + include Assertions + + attr_reader :property, :direction + + def ==(other) + return true if super + hash == other.hash + end + + alias eql? == + + def hash + @property.hash + @direction.hash + end + + def reverse + self.class.new(@property, @direction == :asc ? 
:desc : :asc) + end + + def inspect + "#<#{self.class.name} #{@property.inspect} #{@direction}>" + end + + private + + def initialize(property, direction = :asc) + assert_kind_of 'property', property, Property + assert_kind_of 'direction', direction, Symbol + + @property = property + @direction = direction + end + end # class Direction + + class Operator + include Assertions + + attr_reader :target, :operator + + def to_sym + @property_name + end + + def ==(other) + return true if super + return false unless other.kind_of?(self.class) + @operator == other.operator && @target == other.target + end + + private + + def initialize(target, operator) + assert_kind_of 'operator', operator, Symbol + + @target = target + @operator = operator + end + end # class Operator + + class Path + include Assertions + + %w[ id type ].each { |m| undef_method m } + + attr_reader :relationships, :model, :property, :operator + + [ :gt, :gte, :lt, :lte, :not, :eql, :like, :in ].each do |sym| + class_eval <<-EOS, __FILE__, __LINE__ + def #{sym} + Operator.new(self, :#{sym}) + end + EOS + end + + # duck type the DM::Query::Path to act like a DM::Property + def field(*args) + @property ? @property.field(*args) : nil + end + + # more duck typing + def to_sym + @property ? @property.name.to_sym : @model.storage_name(@repository).to_sym + end + + private + + def initialize(repository, relationships, model, property_name = nil) + assert_kind_of 'repository', repository, Repository + assert_kind_of 'relationships', relationships, Array + assert_kind_of 'model', model, Model + assert_kind_of 'property_name', property_name, Symbol unless property_name.nil? + + @repository = repository + @relationships = relationships + @model = model + @property = @model.properties(@repository.name)[property_name] if property_name + end + + def method_missing(method, *args) + if relationship = @model.relationships(@repository.name)[method] + klass = klass = model == relationship.child_model ? 
relationship.parent_model : relationship.child_model + return Query::Path.new(@repository, @relationships + [ relationship ], klass) + end + + if @model.properties(@repository.name)[method] + @property = @model.properties(@repository.name)[method] unless @property + return self + end + + raise NoMethodError, "undefined property or association `#{method}' on #{@model}" + end + end # class Path + end # class Query +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/repository.rb b/vendor/dm-core-0.9.6/lib/dm-core/repository.rb new file mode 100755 index 0000000..85e9616 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/repository.rb @@ -0,0 +1,159 @@ +module DataMapper + class Repository + include Assertions + + @adapters = {} + + ## + # + # @return the adapters registered for this repository + def self.adapters + @adapters + end + + def self.context + Thread.current[:dm_repository_contexts] ||= [] + end + + def self.default_name + :default + end + + attr_reader :name + + def adapter + # Make adapter instantiation lazy so we can defer repository setup until it's actually + # needed. Do not remove this code. + @adapter ||= begin + raise ArgumentError, "Adapter not set: #{@name}. Did you forget to setup?" 
\ + unless self.class.adapters.has_key?(@name) + + self.class.adapters[@name] + end + end + + def identity_map(model) + @identity_maps[model] ||= IdentityMap.new + end + + # TODO: spec this + def scope + Repository.context << self + + begin + return yield(self) + ensure + Repository.context.pop + end + end + + def create(resources) + adapter.create(resources) + end + + ## + # retrieve a collection of results of a query + # + # @param query composition of the query to perform + # @return result set of the query + # @see DataMapper::Query + def read_many(query) + adapter.read_many(query) + end + + ## + # retrieve a resource instance by a query + # + # @param query composition of the query to perform + # @return the first retrieved instance which matches the query + # @return no object could be found which matches that query + # @see DataMapper::Query + def read_one(query) + adapter.read_one(query) + end + + def update(attributes, query) + adapter.update(attributes, query) + end + + def delete(query) + adapter.delete(query) + end + + def eql?(other) + return true if super + name == other.name + end + + alias == eql? + + def to_s + "#" + end + + private + + def initialize(name) + assert_kind_of 'name', name, Symbol + + @name = name + @identity_maps = {} + end + + # TODO: move to dm-more/dm-migrations + module Migration + # TODO: move to dm-more/dm-migrations + def map(*args) + type_map.map(*args) + end + + # TODO: move to dm-more/dm-migrations + def type_map + @type_map ||= TypeMap.new(adapter.class.type_map) + end + + ## + # + # @return whether or not the data-store exists for this repo + # + # TODO: move to dm-more/dm-migrations + def storage_exists?(storage_name) + adapter.storage_exists?(storage_name) + end + + # TODO: move to dm-more/dm-migrations + def migrate! + Migrator.migrate(name) + end + + # TODO: move to dm-more/dm-migrations + def auto_migrate! + AutoMigrator.auto_migrate(name) + end + + # TODO: move to dm-more/dm-migrations + def auto_upgrade! 
+ AutoMigrator.auto_upgrade(name) + end + end + + include Migration + + # TODO: move to dm-more/dm-transactions + module Transaction + ## + # Produce a new Transaction for this Repository + # + # + # @return a new Transaction (in state + # :none) that can be used to execute code #with_transaction + # + # TODO: move to dm-more/dm-transactions + def transaction + DataMapper::Transaction.new(self) + end + end + + include Transaction + end # class Repository +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/resource.rb b/vendor/dm-core-0.9.6/lib/dm-core/resource.rb new file mode 100644 index 0000000..d2a6860 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/resource.rb @@ -0,0 +1,637 @@ +require 'set' + +module DataMapper + module Resource + include Assertions + + ## + # + # Appends a module for inclusion into the model class after + # DataMapper::Resource. + # + # This is a useful way to extend DataMapper::Resource while still retaining + # a self.included method. + # + # @param [Module] inclusion the module that is to be appended to the module + # after DataMapper::Resource + # + # @return [TrueClass, FalseClass] whether or not the inclusions have been + # successfully appended to the list + # @return + #- + # @api public + def self.append_inclusions(*inclusions) + extra_inclusions.concat inclusions + true + end + + def self.extra_inclusions + @extra_inclusions ||= [] + end + + # When Resource is included in a class this method makes sure + # it gets all the methods + # + # - + # @api private + def self.included(model) + model.extend Model + model.extend ClassMethods if defined?(ClassMethods) + model.const_set('Resource', self) unless model.const_defined?('Resource') + extra_inclusions.each { |inclusion| model.send(:include, inclusion) } + descendants << model + class << model + @_valid_model = false + attr_reader :_valid_model + end + end + + # Return all classes that include the DataMapper::Resource module + # + # ==== Returns + # Set:: a 
set containing the including classes + # + # ==== Example + # + # Class Foo + # include DataMapper::Resource + # end + # + # DataMapper::Resource.descendants.to_a.first == Foo + # + # - + # @api semipublic + def self.descendants + @descendants ||= Set.new + end + + # +--------------- + # Instance methods + + attr_writer :collection + + alias model class + + # returns the value of the attribute. Do not read from instance variables directly, + # but use this method. This method handels the lazy loading the attribute and returning + # of defaults if nessesary. + # + # ==== Parameters + # name:: name attribute to lookup + # + # ==== Returns + # :: the value stored at that given attribute, nil if none, and default if necessary + # + # ==== Example + # + # Class Foo + # include DataMapper::Resource + # + # property :first_name, String + # property :last_name, String + # + # def full_name + # "#{attribute_get(:first_name)} #{attribute_get(:last_name)}" + # end + # + # # using the shorter syntax + # def name_for_address_book + # "#{last_name}, #{first_name}" + # end + # end + # + # - + # @api semipublic + def attribute_get(name) + properties[name].get(self) + end + + # sets the value of the attribute and marks the attribute as dirty + # if it has been changed so that it may be saved. Do not set from + # instance variables directly, but use this method. This method + # handels the lazy loading the property and returning of defaults + # if nessesary. 
+ # + # ==== Parameters + # name:: name attribute to set + # value:: value to store at that location + # + # ==== Returns + # :: the value stored at that given attribute, nil if none, and default if necessary + # + # ==== Example + # + # Class Foo + # include DataMapper::Resource + # + # property :first_name, String + # property :last_name, String + # + # def full_name(name) + # name = name.split(' ') + # attribute_set(:first_name, name[0]) + # attribute_set(:last_name, name[1]) + # end + # + # # using the shorter syntax + # def name_from_address_book(name) + # name = name.split(', ') + # first_name = name[1] + # last_name = name[0] + # end + # end + # + # - + # @api semipublic + def attribute_set(name, value) + properties[name].set(self, value) + end + + # Compares if its the same object or if attributes are equal + # + # ==== Parameters + # other:: Object to compare to + # + # ==== Returns + # :: the outcome of the comparison as a boolean + # + # - + # @api public + def eql?(other) + return true if object_id == other.object_id + return false unless other.kind_of?(model) + return true if repository == other.repository && key == other.key + + properties.each do |property| + return false if property.get!(self) != property.get!(other) + end + + true + end + + alias == eql? + + # Computes a hash for the resource + # + # ==== Returns + # :: the hash value of the resource + # + # - + # @api public + def hash + model.hash + key.hash + end + + # Inspection of the class name and the attributes + # + # ==== Returns + # :: with the class name, attributes with their values + # + # ==== Example + # + # >> Foo.new + # => # + # + # - + # @api public + def inspect + attrs = [] + + properties.each do |property| + value = if property.lazy? && !attribute_loaded?(property.name) && !new_record? 
+ '' + else + send(property.getter).inspect + end + + attrs << "#{property.name}=#{value}" + end + + "#<#{model.name} #{attrs * ' '}>" + end + + # TODO docs + def pretty_print(pp) + pp.group(1, "#<#{model.name}", ">") do + pp.breakable + pp.seplist(attributes.to_a) do |k_v| + pp.text k_v[0].to_s + pp.text " = " + pp.pp k_v[1] + end + end + end + + ## + # + # ==== Returns + # :: the respository this resource belongs to in the context of a collection OR in the class's context + # + # @api public + def repository + @repository || model.repository + end + + # default id method to return the resource id when there is a + # single key, and the model was defined with a primary key named + # something other than id + # + # ==== Returns + # key or keys + # + # -- + # @api public + def id + key = self.key + key.first if key.size == 1 + end + + def key + key_properties.map do |property| + original_values[property.name] || property.get!(self) + end + end + + def readonly! + @readonly = true + end + + def readonly? + @readonly == true + end + + # save the instance to the data-store + # + # ==== Returns + # :: results of the save + # + # @see DataMapper::Repository#save + # + # -- + # #public + def save(context = :default) + # Takes a context, but does nothing with it. This is to maintain the + # same API through out all of dm-more. dm-validations requires a + # context to be passed + + associations_saved = false + child_associations.each { |a| associations_saved |= a.save } + + saved = new_record? ? create : update + + if saved + original_values.clear + end + + parent_associations.each { |a| associations_saved |= a.save } + + # We should return true if the model (or any of its associations) + # were saved. + (saved | associations_saved) == true + end + + # destroy the instance, remove it from the repository + # + # ==== Returns + # :: results of the destruction + # + # -- + # @api public + def destroy + return false if new_record? 
+ return false unless repository.delete(to_query) + + @new_record = true + repository.identity_map(model).delete(key) + original_values.clear + + properties.each do |property| + # We'll set the original value to nil as if we had a new record + original_values[property.name] = nil if attribute_loaded?(property.name) + end + + true + end + + # Checks if the attribute has been loaded + # + # ==== Example + # + # class Foo + # include DataMapper::Resource + # property :name, String + # property :description, Text, :lazy => false + # end + # + # Foo.new.attribute_loaded?(:description) # will return false + # + # -- + # @api public + def attribute_loaded?(name) + instance_variable_defined?(properties[name].instance_variable_name) + end + + # fetches all the names of the attributes that have been loaded, + # even if they are lazy but have been called + # + # ==== Returns + # Array[]:: names of attributes that have been loaded + # + # ==== Example + # + # class Foo + # include DataMapper::Resource + # property :name, String + # property :description, Text, :lazy => false + # end + # + # Foo.new.loaded_attributes # returns [:name] + # + # -- + # @api public + def loaded_attributes + properties.map{|p| p.name if attribute_loaded?(p.name)}.compact + end + + # set of original values of properties + # + # ==== Returns + # Hash:: original values of properties + # + # -- + # @api public + def original_values + @original_values ||= {} + end + + # Hash of attributes that have been marked dirty + # + # ==== Returns + # Hash:: attributes that have been marked dirty + # + # -- + # @api private + def dirty_attributes + dirty_attributes = {} + properties = self.properties + + original_values.each do |name, old_value| + property = properties[name] + new_value = property.get!(self) + + dirty = case property.track + when :hash then old_value != new_value.hash + else + property.value(old_value) != property.value(new_value) + end + + if dirty + property.hash + dirty_attributes[property] = 
property.value(new_value) + end + end + + dirty_attributes + end + + # Checks if the class is dirty + # + # ==== Returns + # True:: returns if class is dirty + # + # -- + # @api public + def dirty? + dirty_attributes.any? + end + + # Checks if the attribute is dirty + # + # ==== Parameters + # name:: name of attribute + # + # ==== Returns + # True:: returns if attribute is dirty + # + # -- + # @api public + def attribute_dirty?(name) + dirty_attributes.has_key?(properties[name]) + end + + def collection + @collection ||= if query = to_query + Collection.new(query) { |c| c << self } + end + end + + # Reload association and all child association + # + # ==== Returns + # self:: returns the class itself + # + # -- + # @api public + def reload + unless new_record? + reload_attributes(*loaded_attributes) + (parent_associations + child_associations).each { |association| association.reload } + end + + self + end + + # Reload specific attributes + # + # ==== Parameters + # *attributes]>:: name of attribute + # + # ==== Returns + # self:: returns the class itself + # + # -- + # @api public + def reload_attributes(*attributes) + unless attributes.empty? || new_record? + collection.reload(:fields => attributes) + end + + self + end + + # Checks if the model has been saved + # + # ==== Returns + # True:: status if the model is new + # + # -- + # @api public + def new_record? 
+ !defined?(@new_record) || @new_record + end + + # all the attributes of the model + # + # ==== Returns + # Hash[]:: All the (non)-lazy attributes + # + # -- + # @api public + def attributes + properties.map do |p| + [p.name, send(p.getter)] if p.reader_visibility == :public + end.compact.to_hash + end + + # Mass assign of attributes + # + # ==== Parameters + # value_hash ]>:: + # + # -- + # @api public + def attributes=(values_hash) + values_hash.each_pair do |k,v| + setter = "#{k.to_s.sub(/\?\z/, '')}=" + + if respond_to?(setter) + send(setter, v) + else + raise NameError, "#{setter} is not a public property" + end + end + end + + # Updates attributes and saves model + # + # ==== Parameters + # attributes Attributes to be updated + # keys keys of Hash to update (others won't be updated) + # + # ==== Returns + # if model got saved or not + # + #- + # @api public + def update_attributes(hash, *update_only) + unless hash.is_a?(Hash) + raise ArgumentError, "Expecting the first parameter of " + + "update_attributes to be a hash; got #{hash.inspect}" + end + loop_thru = update_only.empty? ? hash.keys : update_only + loop_thru.each { |attr| send("#{attr}=", hash[attr]) } + save + end + + # TODO: add docs + def to_query(query = {}) + model.to_query(repository, key, query) unless new_record? + end + + protected + + def properties + model.properties(repository.name) + end + + def key_properties + model.key(repository.name) + end + + def relationships + model.relationships(repository.name) + end + + # Needs to be a protected method so that it is hookable + def create + # Can't create a resource that is not dirty and doesn't have serial keys + return false if new_record? && !dirty? && !model.key.any? { |p| p.serial? 
} + # set defaults for new resource + properties.each do |property| + next if attribute_loaded?(property.name) + property.set(self, property.default_for(self)) + end + + return false unless repository.create([ self ]) == 1 + + @repository = repository + @new_record = false + + repository.identity_map(model).set(key, self) + + true + end + + # Needs to be a protected method so that it is hookable + def update + dirty_attributes = self.dirty_attributes + return true if dirty_attributes.empty? + repository.update(dirty_attributes, to_query) == 1 + end + + private + + def initialize(attributes = {}) # :nodoc: + assert_valid_model + self.attributes = attributes + end + + def assert_valid_model # :nodoc: + return if self.class._valid_model + properties = self.properties + + if properties.empty? && relationships.empty? + raise IncompleteResourceError, "#{model.name} must have at least one property or relationship to be initialized." + end + + if properties.key.empty? + raise IncompleteResourceError, "#{model.name} must have a key." + end + + self.class.instance_variable_set("@_valid_model", true) + end + + # TODO document + # @api semipublic + def attribute_get!(name) + properties[name].get!(self) + end + + # TODO document + # @api semipublic + def attribute_set!(name, value) + properties[name].set!(self, value) + end + + def lazy_load(name) + reload_attributes(*properties.lazy_load_context(name) - loaded_attributes) + end + + def child_associations + @child_associations ||= [] + end + + def parent_associations + @parent_associations ||= [] + end + + # TODO: move to dm-more/dm-transactions + module Transaction + # Produce a new Transaction for the class of this Resource + # + # ==== Returns + # :: + # a new DataMapper::Adapters::Transaction with all DataMapper::Repositories + # of the class of this DataMapper::Resource added. 
+ #- + # @api public + # + # TODO: move to dm-more/dm-transactions + def transaction + model.transaction { |*block_args| yield(*block_args) } + end + end # module Transaction + + include Transaction + end # module Resource +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/scope.rb b/vendor/dm-core-0.9.6/lib/dm-core/scope.rb new file mode 100644 index 0000000..c097875 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/scope.rb @@ -0,0 +1,58 @@ +module DataMapper + module Scope + Model.append_extensions self + + # @api private + def default_scope(repository_name = nil) + repository_name = self.default_repository_name if repository_name == :default || repository_name.nil? + @default_scope ||= {} + @default_scope[repository_name] ||= {} + end + + # @api private + def query + scope_stack.last + end + + protected + + # @api semipublic + def with_scope(query) + # merge the current scope with the passed in query + with_exclusive_scope(self.query ? self.query.merge(query) : query) {|*block_args| yield(*block_args) } + end + + # @api semipublic + def with_exclusive_scope(query) + query = DataMapper::Query.new(repository, self, query) if query.kind_of?(Hash) + + scope_stack << query + + begin + return yield(query) + ensure + scope_stack.pop + end + end + + private + + # @api private + def merge_with_default_scope(query) + DataMapper::Query.new(query.repository, query.model, default_scope_for_query(query)).update(query) + end + + # @api private + def scope_stack + scope_stack_for = Thread.current[:dm_scope_stack] ||= {} + scope_stack_for[self] ||= [] + end + + # @api private + def default_scope_for_query(query) + repository_name = query.repository.name + default_repository_name = query.model.default_repository_name + self.default_scope(default_repository_name).merge(self.default_scope(repository_name)) + end + end # module Scope +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support.rb b/vendor/dm-core-0.9.6/lib/dm-core/support.rb 
new file mode 100644 index 0000000..cfa5986 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support.rb @@ -0,0 +1,7 @@ +dir = Pathname(__FILE__).dirname.expand_path / 'support' + +require dir / 'array' +require dir / 'assertions' +require dir / 'errors' +require dir / 'kernel' +require dir / 'symbol' diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support/array.rb b/vendor/dm-core-0.9.6/lib/dm-core/support/array.rb new file mode 100644 index 0000000..8ccfb3b --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support/array.rb @@ -0,0 +1,13 @@ +class Array + + ## + # atm it assumes self is an array of [key,value]-arrays + # this is just a better way to make hashes than Hash[*array.flatten] + # since you cannot flatten only one level in ruby 1.8.6 + # + def to_hash + h = {} + self.each{ |k,v| h[k] = v } + h + end +end # class Symbol diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support/assertions.rb b/vendor/dm-core-0.9.6/lib/dm-core/support/assertions.rb new file mode 100644 index 0000000..b2b4dc6 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support/assertions.rb @@ -0,0 +1,8 @@ +module DataMapper + module Assertions + def assert_kind_of(name, value, *klasses) + klasses.each { |k| return if value.kind_of?(k) } + raise ArgumentError, "+#{name}+ should be #{klasses.map { |k| k.name } * ' or '}, but was #{value.class.name}", caller(2) + end + end +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support/errors.rb b/vendor/dm-core-0.9.6/lib/dm-core/support/errors.rb new file mode 100644 index 0000000..1f6ff48 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support/errors.rb @@ -0,0 +1,23 @@ +#Some useful errors types +module DataMapper + class ValidationError < StandardError; end + + class ObjectNotFoundError < StandardError; end + + class MaterializationError < StandardError; end + + class RepositoryNotSetupError < StandardError; end + + class IncompleteResourceError < StandardError; end + + class PersistenceError < StandardError; 
end + + class PluginNotFoundError < StandardError; end +end # module DataMapper + +class StandardError + # Displays the specific error message and the backtrace associated with it. + def display + "#{message}\n\t#{backtrace.join("\n\t")}" + end +end # class StandardError diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support/kernel.rb b/vendor/dm-core-0.9.6/lib/dm-core/support/kernel.rb new file mode 100644 index 0000000..2fda366 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support/kernel.rb @@ -0,0 +1,11 @@ +module Kernel + # Delegates to DataMapper::repository. + # Will not overwrite if a method of the same name is pre-defined. + def repository(*args) + if block_given? + DataMapper.repository(*args) { |*block_args| yield(*block_args) } + else + DataMapper.repository(*args) + end + end +end # module Kernel diff --git a/vendor/dm-core-0.9.6/lib/dm-core/support/symbol.rb b/vendor/dm-core-0.9.6/lib/dm-core/support/symbol.rb new file mode 100644 index 0000000..9984f54 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/support/symbol.rb @@ -0,0 +1,41 @@ +class Symbol + def gt + DataMapper::Query::Operator.new(self, :gt) + end + + def gte + DataMapper::Query::Operator.new(self, :gte) + end + + def lt + DataMapper::Query::Operator.new(self, :lt) + end + + def lte + DataMapper::Query::Operator.new(self, :lte) + end + + def not + DataMapper::Query::Operator.new(self, :not) + end + + def eql + DataMapper::Query::Operator.new(self, :eql) + end + + def like + DataMapper::Query::Operator.new(self, :like) + end + + def in + DataMapper::Query::Operator.new(self, :in) + end + + def asc + DataMapper::Query::Operator.new(self, :asc) + end + + def desc + DataMapper::Query::Operator.new(self, :desc) + end +end # class Symbol diff --git a/vendor/dm-core-0.9.6/lib/dm-core/transaction.rb b/vendor/dm-core-0.9.6/lib/dm-core/transaction.rb new file mode 100644 index 0000000..1329fb0 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/transaction.rb @@ -0,0 +1,267 @@ +# TODO: move 
to dm-more/dm-transactions + +module DataMapper + class Transaction + + attr_reader :transaction_primitives, :adapters, :state + + # + # Create a new DataMapper::Transaction + # + # @see DataMapper::Transaction#link + # + # In fact, it just calls #link with the given arguments at the end of the + # constructor. + # + def initialize(*things) + @transaction_primitives = {} + @state = :none + @adapters = {} + link(*things) + commit { |*block_args| yield(*block_args) } if block_given? + end + + # + # Associate this Transaction with some things. + # + # @param things the things you want this Transaction + # associated with + # @details [things a Transaction may be associatied with] + # DataMapper::Adapters::AbstractAdapter subclasses will be added as + # adapters as is. + # Arrays will have their elements added. + # DataMapper::Repositories will have their @adapters added. + # DataMapper::Resource subclasses will have all the repositories of all + # their properties added. + # DataMapper::Resource instances will have all repositories of all their + # properties added. + # @param block a block (taking one argument, the Transaction) to execute + # within this transaction. The transaction will begin and commit around + # the block, and rollback if an exception is raised. + # + def link(*things) + raise "Illegal state for link: #{@state}" unless @state == :none + things.each do |thing| + if thing.is_a?(Array) + link(*thing) + elsif thing.is_a?(DataMapper::Adapters::AbstractAdapter) + @adapters[thing] = :none + elsif thing.is_a?(DataMapper::Repository) + link(thing.adapter) + elsif thing.is_a?(Class) && thing.ancestors.include?(DataMapper::Resource) + link(*thing.repositories) + elsif thing.is_a?(DataMapper::Resource) + link(thing.model) + else + raise "Unknown argument to #{self}#link: #{thing.inspect}" + end + end + return commit { |*block_args| yield(*block_args) } if block_given? 
+ return self + end + + # + # Begin the transaction + # + # Before #begin is called, the transaction is not valid and can not be used. + # + def begin + raise "Illegal state for begin: #{@state}" unless @state == :none + each_adapter(:connect_adapter, [:log_fatal_transaction_breakage]) + each_adapter(:begin_adapter, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + @state = :begin + end + + # + # Commit the transaction + # + # @param block a block (taking the one argument, the Transaction) to + # execute within this transaction. The transaction will begin and commit + # around the block, and roll back if an exception is raised. + # + # @note + # If no block is given, it will simply commit any changes made since the + # Transaction did #begin. + # + def commit + if block_given? + raise "Illegal state for commit with block: #{@state}" unless @state == :none + begin + self.begin + rval = within { |*block_args| yield(*block_args) } + self.commit if @state == :begin + return rval + rescue Exception => e + self.rollback if @state == :begin + raise e + end + else + raise "Illegal state for commit without block: #{@state}" unless @state == :begin + each_adapter(:prepare_adapter, [:rollback_and_close_adapter_if_begin, :rollback_prepared_and_close_adapter_if_prepare]) + each_adapter(:commit_adapter, [:log_fatal_transaction_breakage]) + each_adapter(:close_adapter, [:log_fatal_transaction_breakage]) + @state = :commit + end + end + + # + # Rollback the transaction + # + # Will undo all changes made during the transaction. 
+ # + def rollback + raise "Illegal state for rollback: #{@state}" unless @state == :begin + each_adapter(:rollback_adapter_if_begin, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + each_adapter(:rollback_prepared_adapter_if_prepare, [:rollback_prepared_and_close_adapter_if_begin, :close_adapter_if_none]) + each_adapter(:close_adapter_if_open, [:log_fatal_transaction_breakage]) + @state = :rollback + end + + # + # Execute a block within this Transaction. + # + # @param block the block of code to execute. + # + # @note + # No #begin, #commit or #rollback is performed in #within, but this + # Transaction will pushed on the per thread stack of transactions for each + # adapter it is associated with, and it will ensures that it will pop the + # Transaction away again after the block is finished. + # + def within + raise "No block provided" unless block_given? + raise "Illegal state for within: #{@state}" unless @state == :begin + @adapters.each do |adapter, state| + adapter.push_transaction(self) + end + begin + return yield(self) + ensure + @adapters.each do |adapter, state| + adapter.pop_transaction + end + end + end + + def method_missing(meth, *args, &block) + if args.size == 1 && args.first.is_a?(DataMapper::Adapters::AbstractAdapter) + if (match = meth.to_s.match(/^(.*)_if_(none|begin|prepare|rollback|commit)$/)) + if self.respond_to?(match[1], true) + self.send(match[1], args.first) if state_for(args.first).to_s == match[2] + else + super + end + elsif (match = meth.to_s.match(/^(.*)_unless_(none|begin|prepare|rollback|commit)$/)) + if self.respond_to?(match[1], true) + self.send(match[1], args.first) unless state_for(args.first).to_s == match[2] + else + super + end + else + super + end + else + super + end + end + + def primitive_for(adapter) + raise "Unknown adapter #{adapter}" unless @adapters.include?(adapter) + raise "No primitive for #{adapter}" unless @transaction_primitives.include?(adapter) + @transaction_primitives[adapter] + end + + 
private + + def validate_primitive(primitive) + [:close, :begin, :prepare, :rollback, :rollback_prepared, :commit].each do |meth| + raise "Invalid primitive #{primitive}: doesnt respond_to?(#{meth.inspect})" unless primitive.respond_to?(meth) + end + return primitive + end + + def each_adapter(method, on_fail) + begin + @adapters.each do |adapter, state| + self.send(method, adapter) + end + rescue Exception => e + @adapters.each do |adapter, state| + on_fail.each do |fail_handler| + begin + self.send(fail_handler, adapter) + rescue Exception => e2 + DataMapper.logger.fatal("#{self}#each_adapter(#{method.inspect}, #{on_fail.inspect}) failed with #{e.inspect}: #{e.backtrace.join("\n")} - and when sending #{fail_handler} to #{adapter} we failed again with #{e2.inspect}: #{e2.backtrace.join("\n")}") + end + end + end + raise e + end + end + + def state_for(adapter) + raise "Unknown adapter #{adapter}" unless @adapters.include?(adapter) + @adapters[adapter] + end + + def do_adapter(adapter, what, prerequisite) + raise "No primitive for #{adapter}" unless @transaction_primitives.include?(adapter) + raise "Illegal state for #{what}: #{state_for(adapter)}" unless state_for(adapter) == prerequisite + DataMapper.logger.debug("#{adapter.name}: #{what}") + @transaction_primitives[adapter].send(what) + @adapters[adapter] = what + end + + def log_fatal_transaction_breakage(adapter) + DataMapper.logger.fatal("#{self} experienced a totally broken transaction execution. Presenting member #{adapter.inspect}.") + end + + def connect_adapter(adapter) + raise "Already a primitive for adapter #{adapter}" unless @transaction_primitives[adapter].nil? 
+ @transaction_primitives[adapter] = validate_primitive(adapter.transaction_primitive) + end + + def close_adapter_if_open(adapter) + if @transaction_primitives.include?(adapter) + close_adapter(adapter) + end + end + + def close_adapter(adapter) + raise "No primitive for adapter" unless @transaction_primitives.include?(adapter) + @transaction_primitives[adapter].close + @transaction_primitives.delete(adapter) + end + + def begin_adapter(adapter) + do_adapter(adapter, :begin, :none) + end + + def prepare_adapter(adapter) + do_adapter(adapter, :prepare, :begin); + end + + def commit_adapter(adapter) + do_adapter(adapter, :commit, :prepare) + end + + def rollback_adapter(adapter) + do_adapter(adapter, :rollback, :begin) + end + + def rollback_prepared_adapter(adapter) + do_adapter(adapter, :rollback_prepared, :prepare) + end + + def rollback_prepared_and_close_adapter(adapter) + rollback_prepared_adapter(adapter) + close_adapter(adapter) + end + + def rollback_and_close_adapter(adapter) + rollback_adapter(adapter) + close_adapter(adapter) + end + + end # class Transaction +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/type.rb b/vendor/dm-core-0.9.6/lib/dm-core/type.rb new file mode 100755 index 0000000..353cbd5 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/type.rb @@ -0,0 +1,160 @@ +module DataMapper + + # = Types + # Provides means of writing custom types for properties. Each type is based + # on a ruby primitive and handles its own serialization and materialization, + # and therefore is responsible for providing those methods. + # + # To see complete list of supported types, see documentation for + # DataMapper::Property::TYPES + # + # == Defining new Types + # To define a new type, subclass DataMapper::Type, pick ruby primitive, and + # set the options for this type. 
+ # + # class MyType < DataMapper::Type + # primitive String + # size 10 + # end + # + # Following this, you will be able to use MyType as a type for any given + # property. If special materialization and serialization is required, + # override the class methods + # + # class MyType < DataMapper::Type + # primitive String + # size 10 + # + # def self.dump(value, property) + # + # end + # + # def self.load(value) + # + # end + # end + class Type + PROPERTY_OPTIONS = [ + :accessor, :reader, :writer, + :lazy, :default, :nullable, :key, :serial, :field, :size, :length, + :format, :index, :unique_index, :check, :ordinal, :auto_validation, + :validates, :unique, :track, :precision, :scale + ] + + PROPERTY_OPTION_ALIASES = { + :size => [ :length ] + } + + class << self + + def configure(primitive_type, options) + @_primitive_type = primitive_type + @_options = options + + def self.inherited(base) + base.primitive @_primitive_type + + @_options.each do |k, v| + base.send(k, v) + end + end + + self + end + + # The Ruby primitive type to use as basis for this type. See + # DataMapper::Property::TYPES for list of types. + # + # @param primitive + # The class for the primitive. If nil is passed in, it returns the + # current primitive + # + # @return if the param is nil, return the current primitive. + # + # @api public + def primitive(primitive = nil) + return @primitive if primitive.nil? + + # TODO: change Integer to be used internally once most in-the-wild code + # is updated to use Integer for properties instead of Fixnum, or before + # DM 1.0, whichever comes first + if Fixnum == primitive + warn "#{primitive} properties are deprecated. Please use Integer instead" + primitive = Integer + end + + @primitive = primitive + end + + # Load DataMapper::Property options + PROPERTY_OPTIONS.each do |property_option| + self.class_eval <<-EOS, __FILE__, __LINE__ + def #{property_option}(arg = nil) + return @#{property_option} if arg.nil? 
+ + @#{property_option} = arg + end + EOS + end + + # Create property aliases + PROPERTY_OPTION_ALIASES.each do |property_option, aliases| + aliases.each do |ali| + self.class_eval <<-EOS, __FILE__, __LINE__ + alias #{ali} #{property_option} + EOS + end + end + + # Gives all the options set on this type + # + # @return with all options and their values set on this type + # + # @api public + def options + options = {} + PROPERTY_OPTIONS.each do |method| + next if (value = send(method)).nil? + options[method] = value + end + options + end + end + + # Stub instance method for dumping + # + # @param value the value to dump + # @param property the property the type is being used by + # + # @return Dumped object + # + # @api public + def self.dump(value, property) + value + end + + # Stub instance method for loading + # + # @param value the value to serialize + # @param property the property the type is being used by + # + # @return Serialized object. Must be the same type as the Ruby primitive + # + # @api public + def self.load(value, property) + value + end + + def self.bind(property) + # This method should not modify the state of this type class, and + # should produce no side-effects on the type class. It's just a + # hook to allow your custom-type to modify the property it's bound to. + end + + end # class Type + + def self.Type(primitive_type, options = {}) + Class.new(Type).configure(primitive_type, options) + end + +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/type_map.rb b/vendor/dm-core-0.9.6/lib/dm-core/type_map.rb new file mode 100644 index 0000000..4bd264c --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/type_map.rb @@ -0,0 +1,80 @@ +# TODO: move to dm-more/dm-migrations + +module DataMapper + class TypeMap + + attr_accessor :parent, :chains + + def initialize(parent = nil, &blk) + @parent, @chains = parent, {} + + blk.call(self) unless blk.nil? 
+ end + + def map(type) + @chains[type] ||= TypeChain.new + end + + def lookup(type) + if type_mapped?(type) + lookup_from_map(type) + else + lookup_by_type(type) + end + end + + def lookup_from_map(type) + lookup_from_parent(type).merge(map(type).translate) + end + + def lookup_from_parent(type) + if !@parent.nil? && @parent.type_mapped?(type) + @parent[type] + else + {} + end + end + + # @raise if the type is not a default primitive or has a type map entry. + def lookup_by_type(type) + raise DataMapper::TypeMap::Error.new(type) unless type.respond_to?(:primitive) && !type.primitive.nil? + + lookup(type.primitive).merge(Type::PROPERTY_OPTIONS.inject({}) {|h, k| h[k] = type.send(k); h}) + end + + alias [] lookup + + def type_mapped?(type) + @chains.has_key?(type) || (@parent.nil? ? false : @parent.type_mapped?(type)) + end + + class TypeChain + attr_accessor :primitive, :attributes + + def initialize + @attributes = {} + end + + def to(primitive) + @primitive = primitive + self + end + + def with(attributes) + raise "method 'with' expects a hash" unless attributes.kind_of?(Hash) + @attributes.merge!(attributes) + self + end + + def translate + @attributes.merge((@primitive.nil? ? 
{} : {:primitive => @primitive})) + end + end # class TypeChain + + class Error < StandardError + def initialize(type) + super("Type #{type} must have a default primitive or type map entry") + end + end + end # class TypeMap +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types.rb b/vendor/dm-core-0.9.6/lib/dm-core/types.rb new file mode 100644 index 0000000..870c309 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types.rb @@ -0,0 +1,19 @@ +dir = Pathname(__FILE__).dirname.expand_path / 'types' + +require dir / 'boolean' +require dir / 'discriminator' +require dir / 'text' +require dir / 'paranoid_datetime' +require dir / 'paranoid_boolean' +require dir / 'object' +require dir / 'serial' + +unless defined?(DM) + DM = DataMapper::Types +end + +module DataMapper + module Resource + include Types + end # module Resource +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/boolean.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/boolean.rb new file mode 100644 index 0000000..2568f1a --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/boolean.rb @@ -0,0 +1,7 @@ +module DataMapper + module Types + class Boolean < DataMapper::Type + primitive TrueClass + end # class Boolean + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/discriminator.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/discriminator.rb new file mode 100644 index 0000000..d11989b --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/discriminator.rb @@ -0,0 +1,34 @@ +module DataMapper + module Types + class Discriminator < DataMapper::Type + primitive Class + track :set + default lambda { |r,p| p.model } + nullable false + + def self.bind(property) + model = property.model + + model.class_eval <<-EOS, __FILE__, __LINE__ + def self.descendants + (@descendants ||= []).uniq! 
+ @descendants + end + + after_class_method :inherited, :add_scope_for_discriminator + + def self.add_scope_for_discriminator(retval, target) + target.descendants << target + target.default_scope.update(#{property.name.inspect} => target.descendants) + propagate_descendants(target) + end + + def self.propagate_descendants(target) + descendants << target + superclass.propagate_descendants(target) if superclass.respond_to?(:propagate_descendants) + end + EOS + end + end # class Discriminator + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/object.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/object.rb new file mode 100644 index 0000000..e49fc72 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/object.rb @@ -0,0 +1,24 @@ +require "base64" + +module DataMapper + module Types + class Object < DataMapper::Type + primitive String + size 65535 + lazy true + track :hash + + def self.typecast(value, property) + value + end + + def self.dump(value, property) + Base64.encode64(Marshal.dump(value)) + end + + def self.load(value, property) + value.nil? ? 
nil : Marshal.load(Base64.decode64(value)) + end + end + end +end diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_boolean.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_boolean.rb new file mode 100644 index 0000000..b4b9e98 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_boolean.rb @@ -0,0 +1,34 @@ +module DataMapper + module Types + class ParanoidBoolean < DataMapper::Type(Boolean) + primitive TrueClass + default false + lazy true + + def self.bind(property) + model = property.model + repository = property.repository + + model.send(:set_paranoid_property, property.name){true} + + model.class_eval <<-EOS, __FILE__, __LINE__ + + def self.with_deleted + with_exclusive_scope(#{property.name.inspect} => true) do + yield + end + end + + def destroy + self.class.paranoid_properties.each do |name, blk| + attribute_set(name, blk.call(self)) + end + save + end + EOS + + model.default_scope(repository.name).update(property.name => false) + end + end # class ParanoidBoolean + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_datetime.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_datetime.rb new file mode 100644 index 0000000..9f13891 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/paranoid_datetime.rb @@ -0,0 +1,33 @@ +module DataMapper + module Types + class ParanoidDateTime < DataMapper::Type(DateTime) + primitive DateTime + lazy true + + def self.bind(property) + model = property.model + repository = property.repository + + model.send(:set_paranoid_property, property.name){DateTime.now} + + model.class_eval <<-EOS, __FILE__, __LINE__ + + def self.with_deleted + with_exclusive_scope(#{property.name.inspect}.not => nil) do + yield + end + end + + def destroy + self.class.paranoid_properties.each do |name, blk| + attribute_set(name, blk.call(self)) + end + save + end + EOS + + model.default_scope(repository.name).update(property.name => nil) + end + end # class 
ParanoidDateTime + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/serial.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/serial.rb new file mode 100644 index 0000000..c80a32d --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/serial.rb @@ -0,0 +1,9 @@ +# FIXME: can we alias this to the class Text if it isn't already defined? +module DataMapper + module Types + class Serial < DataMapper::Type + primitive Integer + serial true + end # class Text + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/types/text.rb b/vendor/dm-core-0.9.6/lib/dm-core/types/text.rb new file mode 100644 index 0000000..7715a8f --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/types/text.rb @@ -0,0 +1,10 @@ +# FIXME: can we alias this to the class Text if it isn't already defined? +module DataMapper + module Types + class Text < DataMapper::Type + primitive String + size 65535 + lazy true + end # class Text + end # module Types +end # module DataMapper diff --git a/vendor/dm-core-0.9.6/lib/dm-core/version.rb b/vendor/dm-core-0.9.6/lib/dm-core/version.rb new file mode 100644 index 0000000..bf13e08 --- /dev/null +++ b/vendor/dm-core-0.9.6/lib/dm-core/version.rb @@ -0,0 +1,3 @@ +module DataMapper + VERSION = '0.9.6' unless defined?(DataMapper::VERSION) +end diff --git a/vendor/dm-core-0.9.6/script/all b/vendor/dm-core-0.9.6/script/all new file mode 100755 index 0000000..d81bbec --- /dev/null +++ b/vendor/dm-core-0.9.6/script/all @@ -0,0 +1,5 @@ +#!/usr/bin/env sh +rake spec:unit +ADAPTER=sqlite3 rake spec:integration +ADAPTER=mysql rake spec:integration +ADAPTER=postgres rake spec:integration diff --git a/vendor/dm-core-0.9.6/script/performance.rb b/vendor/dm-core-0.9.6/script/performance.rb new file mode 100755 index 0000000..edd3728 --- /dev/null +++ b/vendor/dm-core-0.9.6/script/performance.rb @@ -0,0 +1,284 @@ +#!/usr/bin/env ruby + +require File.join(File.dirname(__FILE__), '..', 'lib', 'dm-core') 
+require File.join(File.dirname(__FILE__), '..', 'lib', 'dm-core', 'version') + +require 'rubygems' +require 'ftools' + +# sudo gem install rbench +# OR git clone git://github.com/somebee/rbench.git , rake install +gem 'rbench', '>=0.2.2' +require 'rbench' + +gem 'faker', '>=0.3.1' +require 'faker' + +gem 'activerecord', '>=2.1.0' +require 'active_record' + +socket_file = Pathname.glob(%w[ + /opt/local/var/run/mysql5/mysqld.sock + tmp/mysqld.sock + /tmp/mysqld.sock + tmp/mysql.sock + /tmp/mysql.sock + /var/mysql/mysql.sock + /var/run/mysqld/mysqld.sock +]).find { |path| path.socket? } + +configuration_options = { + :adapter => 'mysql', + :username => 'root', + :password => '', + :database => 'data_mapper_1', +} + +configuration_options[:socket] = socket_file unless socket_file.nil? + +log_dir = DataMapper.root / 'log' +log_dir.mkdir unless log_dir.directory? + +DataMapper::Logger.new(log_dir / 'dm.log', :off) +adapter = DataMapper.setup(:default, "mysql://root@localhost/data_mapper_1?socket=#{socket_file}") + +if configuration_options[:adapter] + sqlfile = File.join(File.dirname(__FILE__),'..','tmp','performance.sql') + mysql_bin = %w[mysql mysql5].select{|bin| `which #{bin}`.length > 0 } + mysqldump_bin = %w[mysqldump mysqldump5].select{|bin| `which #{bin}`.length > 0 } +end + +ActiveRecord::Base.logger = Logger.new(log_dir / 'ar.log') +ActiveRecord::Base.logger.level = 0 + +ActiveRecord::Base.establish_connection(configuration_options) + +class ARExhibit < ActiveRecord::Base #:nodoc: + set_table_name 'exhibits' + + belongs_to :user, :class_name => 'ARUser', :foreign_key => 'user_id' +end + +class ARUser < ActiveRecord::Base #:nodoc: + set_table_name 'users' + + has_many :exhibits, :foreign_key => 'user_id' + +end + +ARExhibit.find_by_sql('SELECT 1') + +class Exhibit + include DataMapper::Resource + + property :id, Serial + property :name, String + property :zoo_id, Integer + property :user_id, Integer + property :notes, Text, :lazy => true + property :created_on, 
Date + + belongs_to :user +# property :updated_at, DateTime +end + +class User + include DataMapper::Resource + + property :id, Serial + property :name, String + property :email, String + property :about, Text, :lazy => true + property :created_on, Date + +end + +touch_attributes = lambda do |exhibits| + [*exhibits].each do |exhibit| + exhibit.id + exhibit.name + exhibit.created_on + end +end + +touch_relationships = lambda do |exhibits| + [*exhibits].each do |exhibit| + exhibit.id + exhibit.name + exhibit.created_on + exhibit.user + end +end + + +c = configuration_options + +if sqlfile && File.exists?(sqlfile) + puts "Found data-file. Importing from #{sqlfile}" + #adapter.execute("LOAD DATA LOCAL INFILE '#{sqlfile}' INTO TABLE exhibits") + `#{mysql_bin} -u #{c[:username]} #{"-p#{c[:password]}" unless c[:password].blank?} #{c[:database]} < #{sqlfile}` +else + + puts "Generating data for benchmarking..." + + User.auto_migrate! + Exhibit.auto_migrate! + + users = [] + exhibits = [] + + # pre-compute the insert statements and fake data compilation, + # so the benchmarks below show the actual runtime for the execute + # method, minus the setup steps + + # Using the same paragraph for all exhibits because it is very slow + # to generate unique paragraphs for all exhibits. + paragraph = Faker::Lorem.paragraphs.join($/) + + 10_000.times do |i| + users << [ + 'INSERT INTO `users` (`name`,`email`,`created_on`) VALUES (?, ?, ?)', + Faker::Name.name, + Faker::Internet.email, + Date.today + ] + + exhibits << [ + 'INSERT INTO `exhibits` (`name`, `zoo_id`, `user_id`, `notes`, `created_on`) VALUES (?, ?, ?, ?, ?)', + Faker::Company.name, + rand(10).ceil, + i, + paragraph,#Faker::Lorem.paragraphs.join($/), + Date.today + ] + end + + puts "Inserting 10,000 users..." + 10_000.times { |i| adapter.execute(*users.at(i)) } + puts "Inserting 10,000 exhibits..." 
+ 10_000.times { |i| adapter.execute(*exhibits.at(i)) } + + if sqlfile + answer = nil + until answer && answer[/^$|y|yes|n|no/] + print("Would you like to dump data into tmp/performance.sql (for faster setup)? [Yn]"); + STDOUT.flush + answer = gets + end + + if answer[/^$|y|yes/] + File.makedirs(File.dirname(sqlfile)) + #adapter.execute("SELECT * INTO OUTFILE '#{sqlfile}' FROM exhibits;") + `#{mysqldump_bin} -u #{c[:username]} #{"-p#{c[:password]}" unless c[:password].blank?} #{c[:database]} exhibits users > #{sqlfile}` + puts "File saved\n" + end + end + +end + +TIMES = ENV['x'] ? ENV['x'].to_i : 10_000 + +puts "You can specify how many times you want to run the benchmarks with rake:perf x=(number)" +puts "Some tasks will be run 10 and 1000 times less than (number)" +puts "Benchmarks will now run #{TIMES} times" +# Inform about slow benchmark +# answer = nil +# until answer && answer[/^$|y|yes|n|no/] +# print("A slow benchmark exposing problems with SEL is newly added. It takes approx. 20s\n"); +# print("you have scheduled it to run #{TIMES / 100} times.\nWould you still include the particular benchmark? [Yn]") +# STDOUT.flush +# answer = gets +# end +# run_rel_bench = answer[/^$|y|yes/] ? 
true : false + + +RBench.run(TIMES) do + + column :times + column :ar, :title => "AR 2.1" + column :dm, :title => "DM #{DataMapper::VERSION}" + column :diff, :compare => [:ar,:dm] + + report "Model.new (instantiation)" do + ar { ARExhibit.new } + dm { Exhibit.new } + end + + report "Model.new (setting attributes)" do + attrs = {:name => 'sam', :zoo_id => 1} + ar { ARExhibit.new(attrs) } + dm { Exhibit.new(attrs) } + end + + report "Model.get specific (not cached)" do + ActiveRecord::Base.uncached { ar { touch_attributes[ARExhibit.find(1)] } } + dm { touch_attributes[Exhibit.get(1)] } + end + + report "Model.get specific (cached)" do + ActiveRecord::Base.cache { ar { touch_attributes[ARExhibit.find(1)] } } + Exhibit.repository(:default) { dm { touch_attributes[Exhibit.get(1)] } } + end + + report "Model.first" do + ar { touch_attributes[ARExhibit.first] } + dm { touch_attributes[Exhibit.first] } + end + + report "Model.all limit(100)", (TIMES / 10.0).ceil do + ar { touch_attributes[ARExhibit.find(:all, :limit => 100)] } + dm { touch_attributes[Exhibit.all(:limit => 100)] } + end + + report "Model.all limit(100) with relationship", (TIMES / 10.0).ceil do + ar { touch_relationships[ARExhibit.all(:limit => 100, :include => [:user])] } + dm { touch_relationships[Exhibit.all(:limit => 100)] } + end + + report "Model.all limit(10,000)", (TIMES / 1000.0).ceil do + ar { touch_attributes[ARExhibit.find(:all, :limit => 10_000)] } + dm { touch_attributes[Exhibit.all(:limit => 10_000)] } + end + + create_exhibit = { + :name => Faker::Company.name, + :zoo_id => rand(10).ceil, + :notes => Faker::Lorem.paragraphs.join($/), + :created_on => Date.today + } + + report "Model.create" do + ar { ARExhibit.create(create_exhibit) } + dm { Exhibit.create(create_exhibit) } + end + + report "Resource#attributes" do + attrs_first = {:name => 'sam', :zoo_id => 1} + attrs_second = {:name => 'tom', :zoo_id => 1} + ar { e = ARExhibit.new(attrs_first); e.attributes = attrs_second } + dm { e = 
Exhibit.new(attrs_first); e.attributes = attrs_second } + end + + report "Resource#update" do + ar { e = ARExhibit.find(1); e.name = 'bob'; e.save } + dm { e = Exhibit.get(1); e.name = 'bob'; e.save } + end + + report "Resource#destroy" do + ar { ARExhibit.first.destroy } + dm { Exhibit.first.destroy } + end + + report "Model.transaction" do + ar { ARExhibit.transaction { ARExhibit.new } } + dm { Exhibit.transaction { Exhibit.new } } + end + + summary "Total" +end + +connection = adapter.send(:create_connection) +command = connection.create_command("DROP TABLE exhibits") +command = connection.create_command("DROP TABLE users") +command.execute_non_query rescue nil +connection.close diff --git a/vendor/dm-core-0.9.6/script/profile.rb b/vendor/dm-core-0.9.6/script/profile.rb new file mode 100755 index 0000000..14471b2 --- /dev/null +++ b/vendor/dm-core-0.9.6/script/profile.rb @@ -0,0 +1,87 @@ +#!/usr/bin/env ruby + +require File.join(File.dirname(__FILE__), '..', 'lib', 'dm-core') + +require 'rubygems' + +gem 'ruby-prof', '>=0.6.0' +require 'ruby-prof' + +gem 'faker', '>=0.3.1' +require 'faker' + +OUTPUT = DataMapper.root / 'profile_results.txt' +#OUTPUT = DataMapper.root / 'profile_results.html' + +SOCKET_FILE = Pathname.glob(%w[ + /opt/local/var/run/mysql5/mysqld.sock + /tmp/mysqld.sock + /tmp/mysql.sock + /var/mysql/mysql.sock + /var/run/mysqld/mysqld.sock +]).find { |path| path.socket? } + +DataMapper::Logger.new(DataMapper.root / 'log' / 'dm.log', :debug) +DataMapper.setup(:default, "mysql://root@localhost/data_mapper_1?socket=#{SOCKET_FILE}") + +class Exhibit + include DataMapper::Resource + + property :id, Serial + property :name, String + property :zoo_id, Integer + property :notes, Text, :lazy => true + property :created_on, Date +# property :updated_at, DateTime + + auto_migrate! 
+ create # create one row for testing +end + +touch_attributes = lambda do |exhibits| + [*exhibits].each do |exhibit| + exhibit.id + exhibit.name + exhibit.created_on + exhibit.updated_at + end +end + +# RubyProf, making profiling Ruby pretty since 1899! +def profile(&b) + result = RubyProf.profile &b + printer = RubyProf::FlatPrinter.new(result) + #printer = RubyProf::GraphHtmlPrinter.new(result) + printer.print(OUTPUT.open('w+')) +end + +profile do +# 10_000.times { touch_attributes[Exhibit.get(1)] } +# +# repository(:default) do +# 10_000.times { touch_attributes[Exhibit.get(1)] } +# end +# +# 1000.times { touch_attributes[Exhibit.all(:limit => 100)] } +# +# repository(:default) do +# 1000.times { touch_attributes[Exhibit.all(:limit => 100)] } +# end +# +# 10.times { touch_attributes[Exhibit.all(:limit => 10_000)] } +# +# repository(:default) do +# 10.times { touch_attributes[Exhibit.all(:limit => 10_000)] } +# end + + create_exhibit = { + :name => Faker::Company.name, + :zoo_id => rand(10).ceil, + :notes => Faker::Lorem.paragraphs.join($/), + :created_on => Date.today + } + + 1000.times { Exhibit.create(create_exhibit) } +end + +puts "Done!" 
diff --git a/vendor/dm-core-0.9.6/spec/integration/association_spec.rb b/vendor/dm-core-0.9.6/spec/integration/association_spec.rb new file mode 100644 index 0000000..91c728b --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/association_spec.rb @@ -0,0 +1,1383 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if HAS_SQLITE3 + describe DataMapper::Associations do + before :all do + db1 = File.expand_path(File.join(File.dirname(__FILE__), "custom_db1_sqlite3.db")) + db2 = File.expand_path(File.join(File.dirname(__FILE__), "custom_db2_sqlite3.db")) + FileUtils.touch(db1) + FileUtils.touch(db2) + DataMapper.setup(:custom_db1, "sqlite3://#{db1}") + DataMapper.setup(:custom_db2, "sqlite3://#{db2}") + class CustomParent + include DataMapper::Resource + def self.default_repository_name + :custom_db1 + end + property :id, Serial + property :name, String + repository(:custom_db2) do + has n, :custom_childs + end + end + class CustomChild + include DataMapper::Resource + def self.default_repository_name + :custom_db2 + end + property :id, Serial + property :name, String + repository(:custom_db1) do + belongs_to :custom_parent + end + end + + end + before :each do + [ CustomChild, CustomParent ].each { |m| m.auto_migrate! 
} + + parent = CustomParent.create(:name => "mother") + child1 = parent.custom_childs.create(:name => "son") + child2 = parent.custom_childs.create(:name => "daughter") + + @parent = CustomParent.first(:name => "mother") + @child1 = CustomChild.first(:name => "son") + @child2 = CustomChild.first(:name => "daughter") + end + it "should be able to handle has_many relationships to other repositories" do + @parent.custom_childs.size.should == 2 + @parent.custom_childs.include?(@child1).should == true + @parent.custom_childs.include?(@child2).should == true + @parent.custom_childs.delete(@child1) + @parent.custom_childs.save + @parent.reload + @parent.custom_childs.size.should == 1 + @parent.custom_childs.include?(@child2).should == true + end + it "should be able to handle belongs_to relationships to other repositories" do + @child1.custom_parent.should == @parent + @child2.custom_parent.should == @parent + @child1.custom_parent = nil + @child1.save + @child1.reload + @child1.custom_parent.should == nil + @parent.reload + @parent.custom_childs.size.should == 1 + @parent.custom_childs.include?(@child2).should == true + end + end +end + +if ADAPTER + repository(ADAPTER) do + class Machine + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + has n, :areas + has n, :fussy_areas, :class_name => 'Area', :rating.gte => 3, :type => 'particular' + end + + class Area + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + property :rating, Integer + property :type, String + + belongs_to :machine + end + + class Pie + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + belongs_to :sky + end + + class Sky + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String 
+ + has 1, :pie + end + + class Ultrahost + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + has n, :ultraslices, :order => [:id.desc] + end + + class Ultraslice + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + belongs_to :ultrahost + end + + class Node + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + has n, :children, :class_name => 'Node', :child_key => [ :parent_id ] + belongs_to :parent, :class_name => 'Node', :child_key => [ :parent_id ] + end + + class MadeUpThing + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + belongs_to :area + belongs_to :machine + end + + module Models + class Project + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :title, String, :length => 255, :key => true + property :summary, DataMapper::Types::Text + + has n, :tasks + has 1, :goal + end + + class Goal + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :title, String, :length => 255, :key => true + property :summary, DataMapper::Types::Text + + belongs_to :project + end + + class Task + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :title, String, :length => 255, :key => true + property :description, DataMapper::Types::Text + + belongs_to :project + end + end + + class Galaxy + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :name, String, :key => true, :length => 255 + property :size, Float, :key => true, :precision => 15, :scale => 6 + end + + class Star + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + 
belongs_to :galaxy + end + + end + + describe DataMapper::Associations do + describe 'namespaced associations' do + before do + Models::Project.auto_migrate!(ADAPTER) + Models::Task.auto_migrate!(ADAPTER) + Models::Goal.auto_migrate!(ADAPTER) + end + + it 'should allow namespaced classes in parent and child for many <=> one' do + m = Models::Project.new(:title => 'p1', :summary => 'sum1') + m.tasks << Models::Task.new(:title => 't1', :description => 'desc 1') + m.save + + t = Models::Task.first(:title => 't1') + + t.project.should_not be_nil + t.project.title.should == 'p1' + t.project.tasks.size.should == 1 + + p = Models::Project.first(:title => 'p1') + + p.tasks.size.should == 1 + p.tasks[0].title.should == 't1' + end + + it 'should allow namespaced classes in parent and child for one <=> one' do + g = Models::Goal.new(:title => "g2", :summary => "desc 2") + p = Models::Project.create(:title => "p2", :summary => "sum 2", :goal => g) + + pp = Models::Project.first(:title => 'p2') + pp.goal.title.should == "g2" + + g = Models::Goal.first(:title => "g2") + + g.project.should_not be_nil + g.project.title.should == 'p2' + + g.project.goal.should_not be_nil + end + end + + describe 'many to one associations' do + before do + Machine.auto_migrate!(ADAPTER) + Area.auto_migrate!(ADAPTER) + MadeUpThing.auto_migrate!(ADAPTER) + + machine1 = Machine.create(:name => 'machine1') + machine2 = Machine.create(:name => 'machine2') + area1 = Area.create(:name => 'area1', :machine => machine1) + area2 = Area.create(:name => 'area2') + end + + it '#belongs_to' do + area = Area.new + area.should respond_to(:machine) + area.should respond_to(:machine=) + end + + it 'should create the foreign key property immediately' do + class Duck + include DataMapper::Resource + property :id, Serial + belongs_to :sky + end + Duck.properties.slice(:sky_id).compact.should_not be_empty + duck = Duck.new + duck.should respond_to(:sky_id) + duck.should respond_to(:sky_id=) + end + + it 'should load 
without the parent' + + it 'should allow substituting the parent' do + area1 = Area.first(:name => 'area1') + machine2 = Machine.first(:name => 'machine2') + + area1.machine = machine2 + area1.save + Area.first(:name => 'area1').machine.should == machine2 + end + + it 'should save both the object and parent if both are new' do + area1 = Area.new(:name => 'area1') + area1.machine = Machine.new(:name => 'machine1') + area1.save + area1.machine_id.should == area1.machine.id + end + + it '#belongs_to with namespaced models' do + repository(ADAPTER) do + module FlightlessBirds + class Ostrich + include DataMapper::Resource + property :id, Serial + property :name, String + belongs_to :sky # there's something sad about this :'( + end + end + + FlightlessBirds::Ostrich.properties(ADAPTER).slice(:sky_id).compact.should_not be_empty + end + end + + it 'should load the associated instance' do + machine1 = Machine.first(:name => 'machine1') + Area.first(:name => 'area1').machine.should == machine1 + end + + it 'should save the association key in the child' do + machine2 = Machine.first(:name => 'machine2') + + Area.create(:name => 'area3', :machine => machine2) + Area.first(:name => 'area3').machine.should == machine2 + end + + it 'should set the association key immediately' do + machine = Machine.first(:name => 'machine1') + Area.new(:machine => machine).machine_id.should == machine.id + end + + it "should be able to set an association obtained from another association" do + machine1 = Machine.first(:name => 'machine1') + area1 = Area.first(:name => 'area1') + area1.machine = machine1 + + m = MadeUpThing.create(:machine => area1.machine, :name => "Weird") + + m.machine_id.should == machine1.id + end + + it 'should save the parent upon saving of child' do + e = Machine.new(:name => 'machine10') + y = Area.create(:name => 'area10', :machine => e) + + y.machine.name.should == 'machine10' + Machine.first(:name => 'machine10').should_not be_nil + end + + it 'should set and 
retrieve associations on not yet saved objects' do + e = Machine.create(:name => 'machine10') + y = e.areas.build(:name => 'area10') + + y.machine.name.should == 'machine10' + end + + it 'should convert NULL parent ids into nils' do + Area.first(:name => 'area2').machine.should be_nil + end + + it 'should save nil parents as NULL ids' do + y1 = Area.create(:id => 20, :name => 'area20') + y2 = Area.create(:id => 30, :name => 'area30', :machine => nil) + + y1.id.should == 20 + y1.machine.should be_nil + y2.id.should == 30 + y2.machine.should be_nil + end + + it 'should respect length on foreign keys' do + property = Star.relationships[:galaxy].child_key[:galaxy_name] + property.length.should == 255 + end + + it 'should respect precision and scale on foreign keys' do + property = Star.relationships[:galaxy].child_key[:galaxy_size] + property.precision.should == 15 + property.scale.should == 6 + end + + it 'should be reloaded when calling Resource#reload' do + e = Machine.new(:name => 'machine40') + y = Area.create(:name => 'area40', :machine => e) + + y.send(:machine_association).should_receive(:reload).once + + lambda { y.reload }.should_not raise_error + end + + it "should have machine when created using machine_id" do + m = Machine.create(:name => 'machineX') + a = Area.new(:machine_id => m.id) + a.machine.should == m + end + + it "should not have a machine when orphaned" do + a = Area.new(:machine_id => 42) + a.machine.should be_nil + end + end + + describe 'one to one associations' do + before do + Sky.auto_migrate!(ADAPTER) + Pie.auto_migrate!(ADAPTER) + + pie1 = Pie.create(:name => 'pie1') + pie2 = Pie.create(:name => 'pie2') + sky1 = Sky.create(:name => 'sky1', :pie => pie1) + end + + it '#has 1' do + s = Sky.new + s.should respond_to(:pie) + s.should respond_to(:pie=) + end + + it 'should allow substituting the child' do + sky1 = Sky.first(:name => 'sky1') + pie1 = Pie.first(:name => 'pie1') + pie2 = Pie.first(:name => 'pie2') + + sky1.pie.should == pie1 + 
pie2.sky.should be_nil + + sky1.pie = pie2 + sky1.save + + pie2.sky.should == sky1 + pie1.reload.sky.should be_nil + end + + it 'should load the associated instance' do + sky1 = Sky.first(:name => 'sky1') + pie1 = Pie.first(:name => 'pie1') + + sky1.pie.should == pie1 + end + + it 'should save the association key in the child' do + pie2 = Pie.first(:name => 'pie2') + + sky2 = Sky.create(:id => 2, :name => 'sky2', :pie => pie2) + pie2.sky.should == sky2 + end + + it 'should save the children upon saving of parent' do + p = Pie.new(:id => 10, :name => 'pie10') + s = Sky.create(:id => 10, :name => 'sky10', :pie => p) + + p.sky.should == s + + Pie.first(:name => 'pie10').should_not be_nil + end + + it 'should save nil parents as NULL ids' do + p1 = Pie.create(:id => 20, :name => 'pie20') + p2 = Pie.create(:id => 30, :name => 'pie30', :sky => nil) + + p1.id.should == 20 + p1.sky.should be_nil + p2.id.should == 30 + p2.sky.should be_nil + end + + it 'should be reloaded when calling Resource#reload' do + pie = Pie.first(:name => 'pie1') + pie.send(:sky_association).should_receive(:reload).once + lambda { pie.reload }.should_not raise_error + end + end + + describe 'one to many associations' do + before do + Ultrahost.auto_migrate!(ADAPTER) + Ultraslice.auto_migrate!(ADAPTER) + Machine.auto_migrate!(ADAPTER) + Area.auto_migrate!(ADAPTER) + + ultrahost1 = Ultrahost.create(:name => 'ultrahost1') + ultrahost2 = Ultrahost.create(:name => 'ultrahost2') + ultraslice1 = Ultraslice.create(:name => 'ultraslice1', :ultrahost => ultrahost1) + ultraslice2 = Ultraslice.create(:name => 'ultraslice2', :ultrahost => ultrahost1) + ultraslice3 = Ultraslice.create(:name => 'ultraslice3') + end + + it '#has n' do + h = Ultrahost.new + h.should respond_to(:ultraslices) + end + + it 'should allow removal of a child through a loaded association' do + ultrahost1 = Ultrahost.first(:name => 'ultrahost1') + ultraslice2 = ultrahost1.ultraslices.first + + ultrahost1.ultraslices.size.should == 2 + 
ultrahost1.ultraslices.delete(ultraslice2) + ultrahost1.ultraslices.size.should == 1 + + ultraslice2 = Ultraslice.first(:name => 'ultraslice2') + ultraslice2.ultrahost.should_not be_nil + + ultrahost1.save + + ultraslice2.reload.ultrahost.should be_nil + end + + it 'should use the IdentityMap correctly' do + repository(ADAPTER) do + ultrahost1 = Ultrahost.first(:name => 'ultrahost1') + + ultraslice = ultrahost1.ultraslices.first + ultraslice2 = ultrahost1.ultraslices(:order => [:id]).last # should be the same as 1 + ultraslice3 = Ultraslice.get(2) # should be the same as 1 + + ultraslice.object_id.should == ultraslice2.object_id + ultraslice.object_id.should == ultraslice3.object_id + end + end + + it '#<< should add exactly the parameters' do + machine = Machine.new(:name => 'my machine') + 4.times do |i| + machine.areas << Area.new(:name => "area nr #{i}") + end + machine.save + machine.areas.size.should == 4 + 4.times do |i| + machine.areas.any? do |area| + area.name == "area nr #{i}" + end.should == true + end + machine = Machine.get!(machine.id) + machine.areas.size.should == 4 + 4.times do |i| + machine.areas.any? 
do |area| + area.name == "area nr #{i}" + end.should == true + end + end + + it "#<< should add the correct number of elements if they are created" do + machine = Machine.create(:name => 'my machine') + 4.times do |i| + machine.areas << Area.create(:name => "area nr #{i}", :machine => machine) + end + machine.areas.size.should == 4 + end + + it "#build should add exactly one instance of the built record" do + machine = Machine.create(:name => 'my machine') + + original_size = machine.areas.size + machine.areas.build(:name => "an area", :machine => machine) + + machine.areas.size.should == original_size + 1 + end + + it '#<< should add default values for relationships that have conditions' do + # it should add default values + machine = Machine.new(:name => 'my machine') + machine.fussy_areas << Area.new(:name => 'area 1', :rating => 4 ) + machine.save + Area.first(:name => 'area 1').type.should == 'particular' + # it should not add default values if the condition's property already has a value + machine.fussy_areas << Area.new(:name => 'area 2', :rating => 4, :type => 'not particular') + machine.save + Area.first(:name => 'area 2').type.should == 'not particular' + # it should ignore non :eql conditions + machine.fussy_areas << Area.new(:name => 'area 3') + machine.save + Area.first(:name => 'area 3').rating.should == nil + end + + it 'should load the associated instances, in the correct order' do + ultrahost1 = Ultrahost.first(:name => 'ultrahost1') + + ultrahost1.ultraslices.should_not be_nil + ultrahost1.ultraslices.size.should == 2 + ultrahost1.ultraslices.first.name.should == 'ultraslice2' # ordered by [:id.desc] + ultrahost1.ultraslices.last.name.should == 'ultraslice1' + + ultraslice3 = Ultraslice.first(:name => 'ultraslice3') + + ultraslice3.ultrahost.should be_nil + end + + it 'should add and save the associated instance' do + ultrahost1 = Ultrahost.first(:name => 'ultrahost1') + ultrahost1.ultraslices << Ultraslice.new(:id => 4, :name => 'ultraslice4') + 
ultrahost1.save + + Ultraslice.first(:name => 'ultraslice4').ultrahost.should == ultrahost1 + end + + it 'should not save the associated instance if the parent is not saved' do + h = Ultrahost.new(:id => 10, :name => 'ultrahost10') + h.ultraslices << Ultraslice.new(:id => 10, :name => 'ultraslice10') + + Ultraslice.first(:name => 'ultraslice10').should be_nil + end + + it 'should save the associated instance upon saving of parent' do + h = Ultrahost.new(:id => 10, :name => 'ultrahost10') + h.ultraslices << Ultraslice.new(:id => 10, :name => 'ultraslice10') + h.save + + s = Ultraslice.first(:name => 'ultraslice10') + + s.should_not be_nil + s.ultrahost.should == h + end + + it 'should save the associated instances upon saving of parent when mass-assigned' do + h = Ultrahost.create(:id => 10, :name => 'ultrahost10', :ultraslices => [ Ultraslice.new(:id => 10, :name => 'ultraslice10') ]) + + s = Ultraslice.first(:name => 'ultraslice10') + + s.should_not be_nil + s.ultrahost.should == h + end + + it 'should have finder-functionality' do + h = Ultrahost.first(:name => 'ultrahost1') + + h.ultraslices.should have(2).entries + + s = h.ultraslices.all(:name => 'ultraslice2') + + s.should have(1).entries + s.first.id.should == 2 + + h.ultraslices.first(:name => 'ultraslice2').should == s.first + end + + it 'should be reloaded when calling Resource#reload' do + ultrahost = Ultrahost.first(:name => 'ultrahost1') + ultrahost.send(:ultraslices_association).should_receive(:reload).once + lambda { ultrahost.reload }.should_not raise_error + end + end + + describe 'many-to-one and one-to-many associations combined' do + before do + Node.auto_migrate!(ADAPTER) + + Node.create(:name => 'r1') + Node.create(:name => 'r2') + Node.create(:name => 'r1c1', :parent_id => 1) + Node.create(:name => 'r1c2', :parent_id => 1) + Node.create(:name => 'r1c3', :parent_id => 1) + Node.create(:name => 'r1c1c1', :parent_id => 3) + end + + it 'should properly set #parent' do + r1 = Node.get 1 + 
r1.parent.should be_nil + + n3 = Node.get 3 + n3.parent.should == r1 + + n6 = Node.get 6 + n6.parent.should == n3 + end + + it 'should properly set #children' do + r1 = Node.get(1) + off = r1.children + off.size.should == 3 + off.include?(Node.get(3)).should be_true + off.include?(Node.get(4)).should be_true + off.include?(Node.get(5)).should be_true + end + + it 'should allow to create root nodes' do + r = Node.create(:name => 'newroot') + r.parent.should be_nil + r.children.size.should == 0 + end + + it 'should properly delete nodes' do + r1 = Node.get 1 + + r1.children.size.should == 3 + r1.children.delete(Node.get(4)) + r1.save + Node.get(4).parent.should be_nil + r1.children.size.should == 2 + end + end + + describe 'through-associations' do + before :all do + repository(ADAPTER) do + module Sweets + class Shop + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + has n, :cakes # has n + has n, :recipes, :through => :cakes # has n => has 1 + has n, :ingredients, :through => :cakes # has n => has 1 => has n + has n, :creators, :through => :cakes # has n => has 1 => has 1 + has n, :ultraslices, :through => :cakes # has n => has n + has n, :bites, :through => :cakes # has n => has n => has n + has n, :shapes, :through => :cakes # has n => has n => has 1 + has n, :customers, :through => :cakes # has n => belongs_to (pending) + has 1, :shop_owner # has 1 + has 1, :wife, :through => :shop_owner # has 1 => has 1 + has 1, :ring, :through => :shop_owner # has 1 => has 1 => has 1 + has n, :coats, :through => :shop_owner # has 1 => has 1 => has n + has n, :children, :through => :shop_owner # has 1 => has n + has n, :toys, :through => :shop_owner # has 1 => has n => has n + has n, :boogers, :through => :shop_owner # has 1 => has n => has 1 + end + + class ShopOwner + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + 
belongs_to :shop + has 1, :wife + has n, :children + has n, :toys, :through => :children + has n, :boogers, :through => :children + has n, :coats, :through => :wife + has 1, :ring, :through => :wife + has n, :schools, :through => :children + end + + class Wife + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :shop_owner + has 1, :ring + has n, :coats + end + + class Coat + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :wife + end + + class Ring + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :wife + end + + class Child + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :shop_owner + has n, :toys + has 1, :booger + end + + class Booger + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :child + end + + class Toy + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :child + end + + class Cake + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :shop + belongs_to :customer + has n, :ultraslices + has n, :bites, :through => :ultraslices + has 1, :recipe + has n, :ingredients, :through => :recipe + has 1, :creator, :through => :recipe + has n, :shapes, :through => :ultraslices + end + + class Recipe + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :cake + has n, :ingredients + has 1, :creator + end + + class Customer + include 
DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + has n, :cakes + end + + class Creator + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :recipe + end + + class Ingredient + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :recipe + end + + class Ultraslice + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :size, Integer + belongs_to :cake + has n, :bites + has 1, :shape + end + + class Shape + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :ultraslice + end + + class Bite + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + property :id, Serial + property :name, String + belongs_to :ultraslice + end + + DataMapper::Resource.descendants.each do |descendant| + descendant.auto_migrate!(ADAPTER) if descendant.name =~ /^Sweets::/ + end + + betsys = Shop.new(:name => "Betsy's") + betsys.save + + # + # has n + # + + german_chocolate = Cake.new(:name => 'German Chocolate') + betsys.cakes << german_chocolate + german_chocolate.save + short_cake = Cake.new(:name => 'Short Cake') + betsys.cakes << short_cake + short_cake.save + + # has n => belongs_to + + old_customer = Customer.new(:name => 'John Johnsen') + old_customer.cakes << german_chocolate + old_customer.cakes << short_cake + german_chocolate.save + short_cake.save + old_customer.save + + # has n => has 1 + + schwarzwald = Recipe.new(:name => 'Schwarzwald Cake') + schwarzwald.save + german_chocolate.recipe = schwarzwald + german_chocolate.save + shortys_special = Recipe.new(:name => "Shorty's Special") + shortys_special.save + short_cake.recipe = shortys_special + 
short_cake.save + + # has n => has 1 => has 1 + + runar = Creator.new(:name => 'Runar') + schwarzwald.creator = runar + runar.save + berit = Creator.new(:name => 'Berit') + shortys_special.creator = berit + berit.save + + # has n => has 1 => has n + + 4.times do |i| schwarzwald.ingredients << Ingredient.new(:name => "Secret ingredient nr #{i}") end + 6.times do |i| shortys_special.ingredients << Ingredient.new(:name => "Well known ingredient nr #{i}") end + + # has n => has n + + 10.times do |i| german_chocolate.ultraslices << Ultraslice.new(:size => i) end + 5.times do |i| short_cake.ultraslices << Ultraslice.new(:size => i) end + german_chocolate.ultraslices.size.should == 10 + # has n => has n => has 1 + + german_chocolate.ultraslices.each do |ultraslice| + shape = Shape.new(:name => 'square') + ultraslice.shape = shape + shape.save + end + short_cake.ultraslices.each do |ultraslice| + shape = Shape.new(:name => 'round') + ultraslice.shape = shape + shape.save + end + + # has n => has n => has n + german_chocolate.ultraslices.each do |ultraslice| + 6.times do |i| + ultraslice.bites << Bite.new(:name => "Big bite nr #{i}") + end + end + short_cake.ultraslices.each do |ultraslice| + 3.times do |i| + ultraslice.bites << Bite.new(:name => "Small bite nr #{i}") + end + end + + # + # has 1 + # + + betsy = ShopOwner.new(:name => 'Betsy') + betsys.shop_owner = betsy + betsys.save + + # has 1 => has 1 + + barry = Wife.new(:name => 'Barry') + betsy.wife = barry + barry.save + + # has 1 => has 1 => has 1 + + golden = Ring.new(:name => 'golden') + barry.ring = golden + golden.save + + # has 1 => has 1 => has n + + 3.times do |i| + barry.coats << Coat.new(:name => "Fancy coat nr #{i}") + end + barry.save + + # has 1 => has n + + 5.times do |i| + betsy.children << Child.new(:name => "Snotling nr #{i}") + end + betsy.save + + # has 1 => has n => has n + + betsy.children.each do |child| + 4.times do |i| + child.toys << Toy.new(:name => "Cheap toy nr #{i}") + end + child.save + 
end + + # has 1 => has n => has 1 + + betsy.children.each do |child| + booger = Booger.new(:name => 'Nasty booger') + child.booger = booger + child.save + end + end + end + end + + # + # has n + # + + it 'should return the right children for has n => has n relationships' do + Sweets::Shop.first.ultraslices.size.should == 15 + 10.times do |i| + Sweets::Shop.first.ultraslices.select do |ultraslice| + ultraslice.cake == Sweets::Cake.first(:name => 'German Chocolate') && ultraslice.size == i + end + end + end + + it 'should return the right children for has n => has n => has 1' do + Sweets::Shop.first.shapes.size.should == 15 + Sweets::Shop.first.shapes.select do |shape| + shape.name == 'square' + end.size.should == 10 + Sweets::Shop.first.shapes.select do |shape| + shape.name == 'round' + end.size.should == 5 + end + + it 'should return the right children for has n => has n => has n' do + Sweets::Shop.first.bites.size.should == 75 + Sweets::Shop.first.bites.select do |bite| + bite.ultraslice.cake == Sweets::Cake.first(:name => 'German Chocolate') + end.size.should == 60 + Sweets::Shop.first.bites.select do |bite| + bite.ultraslice.cake == Sweets::Cake.first(:name => 'Short Cake') + end.size.should == 15 + end + + it 'should return the right children for has n => belongs_to relationships' do + Sweets::Customer.first.cakes.size.should == 2 + customers = Sweets::Shop.first.customers.select do |customer| + customer.name == 'John Johnsen' + end + customers.size.should == 1 + # another example can be found here: http://pastie.textmate.org/private/tt1hf1syfsytyxdgo4qxawfl + end + + it 'should return the right children for has n => has 1 relationships' do + Sweets::Shop.first.recipes.size.should == 2 + Sweets::Shop.first.recipes.select do |recipe| + recipe.name == 'Schwarzwald Cake' + end.size.should == 1 + Sweets::Shop.first.recipes.select do |recipe| + recipe.name == "Shorty's Special" + end.size.should == 1 + end + + it 'should return the right children for has n => has 1 
=> has 1 relationships' do + Sweets::Shop.first.creators.size.should == 2 + Sweets::Shop.first.creators.any? do |creator| + creator.name == 'Runar' + end.should == true + Sweets::Shop.first.creators.any? do |creator| + creator.name == 'Berit' + end.should == true + end + + it 'should return the right children for has n => has 1 => has n relationships' do + Sweets::Shop.first.ingredients.size.should == 10 + 4.times do |i| + Sweets::Shop.first.ingredients.any? do |ingredient| + ingredient.name == "Secret ingredient nr #{i}" && ingredient.recipe.cake == Sweets::Cake.first(:name => 'German Chocolate') + end.should == true + end + 6.times do |i| + Sweets::Shop.first.ingredients.any? do |ingredient| + ingredient.name == "Well known ingredient nr #{i}" && ingredient.recipe.cake == Sweets::Cake.first(:name => 'Short Cake') + end.should == true + end + end + + # + # has 1 + # + + it 'should return the right children for has 1 => has 1 relationships' do + Sweets::Shop.first.wife.should == Sweets::Wife.first + end + + it 'should return the right children for has 1 => has 1 => has 1 relationships' do + Sweets::Shop.first.ring.should == Sweets::Ring.first + end + + it 'should return the right children for has 1 => has 1 => has n relationships' do + Sweets::Shop.first.coats.size.should == 3 + 3.times do |i| + Sweets::Shop.first.coats.any? do |coat| + coat.name == "Fancy coat nr #{i}" + end.should == true + end + end + + it 'should return the right children for has 1 => has n relationships' do + Sweets::Shop.first.children.size.should == 5 + 5.times do |i| + Sweets::Shop.first.children.any? 
do |child| + child.name == "Snotling nr #{i}" + end.should == true + end + end + + it 'should return the right children for has 1 => has n => has 1 relationships' do + Sweets::Shop.first.boogers.size.should == 5 + Sweets::Shop.first.boogers.inject(Set.new) do |sum, booger| + sum << booger.child_id + end.size.should == 5 + end + + it 'should return the right children for has 1 => has n => has n relationships' do + Sweets::Shop.first.toys.size.should == 20 + 5.times do |child_nr| + 4.times do |toy_nr| + Sweets::Shop.first.toys.any? do |toy| + toy.name == "Cheap toy nr #{toy_nr}" && toy.child = Sweets::Child.first(:name => "Snotling nr #{child_nr}") + end.should == true + end + end + end + + # + # misc + # + + it 'should join tables in the right order during has 1 => has n => has 1 queries' do + child = Sweets::Shop.first.children(:name => 'Snotling nr 3').booger(:name.like => 'Nasty booger') + child.should_not be_nil + child.size.should eql(1) + child.first.name.should eql("Nasty booger") + end + + it 'should join tables in the right order for belongs_to relations' do + wife = Sweets::Wife.first(Sweets::Wife.shop_owner.name => "Betsy", Sweets::Wife.shop_owner.shop.name => "Betsy's") + wife.should_not be_nil + wife.name.should eql("Barry") + end + + it 'should raise exception if you try to change it' do + lambda do + Sweets::Shop.first.wife = Sweets::Wife.new(:name => 'Larry') + end.should raise_error(DataMapper::Associations::ImmutableAssociationError) + end + + it 'should be reloaded when calling Resource#reload' do + betsys = Sweets::Shop.first(:name => "Betsy's") + betsys.send(:customers_association).should_receive(:reload).once + lambda { betsys.reload }.should_not raise_error + end + + end + + if false # Many to many not yet implemented + describe "many to many associations" do + before(:all) do + class RightItem + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + has n..n, 
:left_items + end + + class LeftItem + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + has n..n, :right_items + end + + RightItem.auto_migrate! + LeftItem.auto_migrate! + end + + def create_item_pair(number) + @ri = RightItem.new(:name => "ri#{number}") + @li = LeftItem.new(:name => "li#{number}") + end + + it "should add to the association from the left" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0000" + @ri.save; @li.save + @ri.should_not be_new_record + @li.should_not be_new_record + + @li.right_items << @ri + @li.right_items.should include(@ri) + @li.reload + @ri.reload + @li.right_items.should include(@ri) + end + end + + it "should add to the association from the right" do + create_item_pair "0010" + @ri.save; @li.save + @ri.should_not be_new_record + @li.should_not be_new_record + + @ri.left_items << @li + @ri.left_items.should include(@li) + @li.reload + @ri.reload + @ri.left_items.should include(@li) + end + + it "should load the associated collection from the either side" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0020" + @ri.save; @li.save + @ri.left_items << @li + @ri.reload; @li.reload + + @ri.left_items.should include(@li) + @li.right_items.should include(@ri) + end + end + + it "should load the associated collection from the right" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0030" + @ri.save; @li.save + @li.right_items << @li + @ri.reload; @li.reload + + @ri.left_items.should include(@li) + @li.right_items.should include(@ri) + end + end + + it "should save the left side of the association if new record" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0040" + @ri.save + @li.should be_new_record + @ri.left_items << @li + @li.should_not be_new_record + end + end + + it "should save the right side of the association if new 
record" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0050" + @li.save + @ri.should be_new_record + @li.right_items << @ri + @ri.should_not be_new_record + end + end + + it "should save both side of the association if new record" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0060" + @li.should be_new_record + @ri.should be_new_record + @ri.left_items << @li + @ri.should_not be_new_record + @li.should_not be_new_record + end + end + + it "should remove an item from the left collection without destroying the item" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0070" + @li.save; @ri.save + @ri.left_items << @li + @ri.reload; @li.reload + @ri.left_items.should include(@li) + @ri.left_items.delete(@li) + @ri.left_items.should_not include(@li) + @li.reload + LeftItem.get(@li.id).should_not be_nil + end + end + + it "should remove an item from the right collection without destroying the item" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0080" + @li.save; @ri.save + @li.right_items << @ri + @li.reload; @ri.reload + @li.right_items.should include(@ri) + @li.right_items.delete(@ri) + @li.right_items.should_not include(@ri) + @ri.reload + RightItem.get(@ri.id).should_not be_nil + end + end + + it "should remove the item from the collection when an item is deleted" do + pending "Waiting on Many To Many to be implemented" do + create_item_pair "0090" + @li.save; @ri.save + @ri.left_items << @li + @ri.reload; @li.reload + @ri.left_items.should include(@li) + @li.destroy + @ri.reload + @ri.left_items.should_not include(@li) + end + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/association_through_spec.rb b/vendor/dm-core-0.9.6/spec/integration/association_through_spec.rb new file mode 100644 index 0000000..179f084 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/association_through_spec.rb @@ -0,0 +1,203 
@@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if ADAPTER + describe 'through-associations' do + before :all do + repository(ADAPTER) do + class Tag + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :title, String + property :voided, Boolean, :default => false + + has n, :taggings + + has n, :relationships + has n, :related_posts, :through => :relationships, :class_name => 'Post', :child_key => [:post_id] + + has n, :posts, :through => :taggings + end + + class Tagging + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :title, String + + belongs_to :post + belongs_to :tag + end + + class Post + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :title, String + + has n, :taggings + has n, :tags, :through => :taggings + + has n, :relationships + has n, :related_posts, + :through => :relationships, + :child_key => [:post_id], + :class_name => "Post" + + has n, :void_tags, + :through => :taggings, + :child_key => [:post_id], + :class_name => "Tag", + :remote_relationship_name => :tag, + Post.taggings.tag.voided => true + end + + class Relationship + include DataMapper::Resource + def self.default_repository_name + ADAPTER + end + + property :id, Serial + belongs_to :post + belongs_to :related_post, :class_name => "Post", :child_key => [:related_post_id] + end + + [Post, Tag, Tagging, Relationship].each do |descendant| + descendant.auto_migrate!(ADAPTER) + end + end + end + + describe '(sample data)' do + before(:each) do + post = Post.create(:title => "Entry") + another_post = Post.create(:title => "Another") + + crappy = Tagging.new + post.taggings << crappy + post.save + + crap = Tag.create(:title => "crap") + crap.taggings << crappy + crap.save + + crappier = Tagging.new + post.taggings << crappier + post.save + + crapz = 
Tag.create(:title => "crapz", :voided => true) + crapz.taggings << crappier + crapz.save + + goody = Tagging.new + another_post.taggings << goody + another_post.save + + good = Tag.create(:title => "good") + good.taggings << goody + good.save + + relation = Relationship.new(:related_post_id => another_post.id) + post.relationships << relation + post.save + end + + it 'should return the right children for has n => belongs_to relationships' do + Post.first.tags.select do |tag| + tag.title == 'crap' + end.size.should == 1 + end + + it 'should return the right children for has n => belongs_to self-referential relationships' do + Post.first.related_posts.select do |post| + post.title == 'Another' + end.size.should == 1 + end + + it 'should handle all()' do + related_posts = Post.first.related_posts + related_posts.all.object_id.should == related_posts.object_id + related_posts.all(:id => 2).first.should == Post.get!(2) + end + + it 'should handle first()' do + post = Post.get!(2) + related_posts = Post.first.related_posts + related_posts.first.should == post + related_posts.first(10).should == [ post ] + related_posts.first(:id => 2).should == post + related_posts.first(10, :id => 2).map { |r| r.id }.should == [post.id] + end + + it 'should handle get()' do + post = Post.get!(2) + related_posts = Post.first.related_posts + related_posts.get(2).should == post + end + + it 'should proxy object should be frozen' do + Post.first.related_posts.should be_frozen + end + + it "should respect tagging with conditions" do + post = Post.get(1) + post.tags.size + post.tags.select{ |t| t.voided == true }.size.should == 1 + post.void_tags.size.should == 1 + post.void_tags.all?{ |t| t.voided == true }.should be_true + end + end + + describe "Saved Tag, Post, Tagging" do + before(:each) do + @tag = Tag.create + @post = Post.create + @tagging = Tagging.create( + :tag => @tag, + :post => @post + ) + end + + it "should get posts of a tag" do + @tag.posts.should == [@post] + end + + it 
"should get tags of a post" do + @post.tags.should == [@tag] + end + end + + describe "In-memory Tag, Post, Tagging" do + before(:each) do + @tag = Tag.new + @post = Post.new + @tagging = Tagging.new( + :tag => @tag, + :post => @post + ) + end + + it "should get posts of a tag" do + pending("DataMapper does not yet support in-memory associations") do + @tag.posts.should == [@post] + end + end + + it "should get tags of a post" do + pending("DataMapper does not yet support in-memory associations") do + @post.tags.should == [@tag] + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/associations/many_to_many_spec.rb b/vendor/dm-core-0.9.6/spec/integration/associations/many_to_many_spec.rb new file mode 100644 index 0000000..db39971 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/associations/many_to_many_spec.rb @@ -0,0 +1,449 @@ +require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "spec_helper")) + +describe DataMapper::Associations::ManyToMany::Proxy do + before :all do + class Editor + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + + has n, :books, :through => Resource + end + + Object.send(:remove_const, :Book) if defined?(Book) + + class Book + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :title, String + + has n, :editors, :through => Resource + end + end + + before do + [ Book, Editor, BookEditor ].each { |k| k.auto_migrate! 
} + + repository(ADAPTER) do + book_1 = Book.create(:title => "Dubliners") + book_2 = Book.create(:title => "Portrait of the Artist as a Young Man") + book_3 = Book.create(:title => "Ulysses") + + editor_1 = Editor.create(:name => "Jon Doe") + editor_2 = Editor.create(:name => "Jane Doe") + + BookEditor.create(:book => book_1, :editor => editor_1) + BookEditor.create(:book => book_2, :editor => editor_1) + BookEditor.create(:book => book_1, :editor => editor_2) + + @parent = book_3 + @association = @parent.editors + @other = [ editor_1 ] + end + end + + it "should provide #replace" do + @association.should respond_to(:replace) + end + + describe "#replace" do + it "should remove the resource from the collection" do + @association.should have(0).entries + @association.replace(@other) + @association.should == @other + end + + it "should not automatically save that the resource was removed from the association" do + @association.replace(@other) + @parent.reload.should have(0).editors + end + + it "should return the association" do + @association.replace(@other).object_id.should == @association.object_id + end + + it "should add the new resources so they will be saved when saving the parent" do + @association.replace(@other) + @association.should == @other + @parent.save + @association.reload.should == @other + end + + it "should instantiate the remote model if passed an array of hashes" do + @association.replace([ { :name => "Jim Smith" } ]) + other = [ Editor.first(:name => "Jim Smith") ] + other.first.should_not be_nil + @association.should == other + @parent.save + @association.reload.should == other + end + end + + it "should correctly link records" do + Book.get(1).should have(2).editors + Book.get(2).should have(1).editors + Book.get(3).should have(0).editors + Editor.get(1).should have(2).books + Editor.get(2).should have(1).books + end + + it "should be able to have associated objects manually added" do + book = Book.get(3) + book.should have(0).editors + + be 
= BookEditor.new(:book_id => book.id, :editor_id => 2) + book.book_editors << be + book.save + + book.reload.should have(1).editors + end + + it "should automatically added necessary through class" do + book = Book.get(3) + book.should have(0).editors + + book.editors << Editor.get(1) + book.editors << Editor.new(:name => "Jimmy John") + book.save + + book.reload.should have(2).editors + end + + it "should react correctly to a new record" do + book = Book.new(:title => "Finnegan's Wake") + editor = Editor.get(2) + book.should have(0).editors + editor.should have(1).books + + book.editors << editor + book.save + + book.reload.should have(1).editors + editor.reload.should have(2).books + end + + it "should be able to delete intermediate model" do + book = Book.get(1) + book.should have(2).book_editors + book.should have(2).editors + + be = BookEditor.get(1,1) + book.book_editors.delete(be) + book.save + + book.reload + book.should have(1).book_editors + book.should have(1).editors + end + + it "should be clearable" do + repository(ADAPTER) do + book = Book.get(2) + book.should have(1).book_editors + book.should have(1).editors + + book.editors.clear + book.save + + book.reload + book.should have(0).book_editors + book.should have(0).editors + end + repository(ADAPTER) do + Book.get(2).should have(0).editors + end + end + + it "should be able to delete one object" do + book = Book.get(1) + book.should have(2).book_editors + book.should have(2).editors + + editor = book.editors.first + book.editors.delete(editor) + book.save + + book.reload + book.should have(1).book_editors + book.should have(1).editors + editor.reload.books.should_not include(book) + end + + it "should be destroyable" do + pending "cannot destroy a collection yet" do + book = Book.get(2) + book.should have(1).editors + + book.editors.destroy + book.save + + book.reload + book.should have(0).editors + end + end + + describe "with natural keys" do + before :all do + class Author + include 
DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :name, String, :key => true + + has n, :books, :through => Resource + end + + class Book + has n, :authors, :through => Resource + end + end + + before do + [ Author, AuthorBook ].each { |k| k.auto_migrate! } + + @author = Author.create(:name => "James Joyce") + + @book_1 = Book.get!(1) + @book_2 = Book.get!(2) + @book_3 = Book.get!(3) + + AuthorBook.create(:book => @book_1, :author => @author) + AuthorBook.create(:book => @book_2, :author => @author) + AuthorBook.create(:book => @book_3, :author => @author) + end + + it "should have a join resource where the natural key is a property" do + AuthorBook.properties[:author_name].primitive.should == String + end + + it "should have a join resource where every property is part of the key" do + AuthorBook.key.should == AuthorBook.properties.to_a + end + + it "should correctly link records" do + @author.should have(3).books + @book_1.should have(1).authors + @book_2.should have(1).authors + @book_3.should have(1).authors + end + end + + describe "When join model has non-serial (integer) natural keys." do + before :all do + class Tag + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String, :size => 128 + + has n, :book_taggings + has n, :books, :through => :book_taggings + end + + class BookTagging + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :book_id, Integer, :key => true + property :tag_id, Integer, :key => true + + belongs_to :book + belongs_to :tag + end + + class Book + has n, :book_taggings + has n, :tags, :through => :book_taggings + end + end + + before do + [ Tag, BookTagging ].each { |k| k.auto_migrate! 
} + + @tag_1 = Tag.create(:name => "good") + @tag_2 = Tag.create(:name => "long") + + @book_1 = Book.get!(1) + @book_2 = Book.get!(2) + @book_3 = Book.get!(3) + + BookTagging.create(:book => @book_2, :tag => @tag_1) + BookTagging.create(:book => @book_2, :tag => @tag_2) + BookTagging.create(:book => @book_3, :tag => @tag_2) + end + + it "should fetch all tags for a book" do + @book_1.tags.should have(0).tags + @book_2.tags.should have(2).tags + @book_3.tags.should have(1).tags + end + + it "should allow for adding an association using the << operator" do + @book_1.book_taggings << @tag_1 + @book_1.tags.should have(0).tags + end + end + + describe "with renamed associations" do + before :all do + class Singer + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + + has n, :tunes, :through => Resource, :class_name => 'Song' + end + + class Song + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :title, String + + has n, :performers, :through => Resource, :class_name => 'Singer' + end + end + + before do + [ Singer, Song, SingerSong ].each { |k| k.auto_migrate! 
} + + song_1 = Song.create(:title => "Dubliners") + song_2 = Song.create(:title => "Portrait of the Artist as a Young Man") + song_3 = Song.create(:title => "Ulysses") + + singer_1 = Singer.create(:name => "Jon Doe") + singer_2 = Singer.create(:name => "Jane Doe") + + SingerSong.create(:song => song_1, :singer => singer_1) + SingerSong.create(:song => song_2, :singer => singer_1) + SingerSong.create(:song => song_1, :singer => singer_2) + + @parent = song_3 + @association = @parent.performers + @other = [ singer_1 ] + end + + it "should provide #replace" do + @association.should respond_to(:replace) + end + + it "should correctly link records" do + Song.get(1).should have(2).performers + Song.get(2).should have(1).performers + Song.get(3).should have(0).performers + Singer.get(1).should have(2).tunes + Singer.get(2).should have(1).tunes + end + + it "should be able to have associated objects manually added" do + song = Song.get(3) + song.should have(0).performers + + be = SingerSong.new(:song_id => song.id, :singer_id => 2) + song.singer_songs << be + song.save + + song.reload.should have(1).performers + end + + it "should automatically added necessary through class" do + song = Song.get(3) + song.should have(0).performers + + song.performers << Singer.get(1) + song.performers << Singer.new(:name => "Jimmy John") + song.save + + song.reload.should have(2).performers + end + + it "should react correctly to a new record" do + song = Song.new(:title => "Finnegan's Wake") + singer = Singer.get(2) + song.should have(0).performers + singer.should have(1).tunes + + song.performers << singer + song.save + + song.reload.should have(1).performers + singer.reload.should have(2).tunes + end + + it "should be able to delete intermediate model" do + song = Song.get(1) + song.should have(2).singer_songs + song.should have(2).performers + + be = SingerSong.get(1,1) + song.singer_songs.delete(be) + song.save + + song.reload + song.should have(1).singer_songs + song.should 
have(1).performers + end + + it "should be clearable" do + repository(ADAPTER) do + song = Song.get(2) + song.should have(1).singer_songs + song.should have(1).performers + + song.performers.clear + song.save + + song.reload + song.should have(0).singer_songs + song.should have(0).performers + end + repository(ADAPTER) do + Song.get(2).should have(0).performers + end + end + + it "should be able to delete one object" do + song = Song.get(1) + song.should have(2).singer_songs + song.should have(2).performers + + editor = song.performers.first + song.performers.delete(editor) + song.save + + song.reload + song.should have(1).singer_songs + song.should have(1).performers + editor.reload.tunes.should_not include(song) + end + + it "should be destroyable" do + pending "cannot destroy a collection yet" do + song = Song.get(2) + song.should have(1).performers + + song.performers.destroy + song.save + + song.reload + song.should have(0).performers + end + end + end + +end diff --git a/vendor/dm-core-0.9.6/spec/integration/associations/many_to_one_spec.rb b/vendor/dm-core-0.9.6/spec/integration/associations/many_to_one_spec.rb new file mode 100644 index 0000000..5e4de90 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/associations/many_to_one_spec.rb @@ -0,0 +1,163 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +if ADAPTER + module ManyToOneSpec + class Parent + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + end + + class Child + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + property :type, Discriminator + + belongs_to :parent + end + + class StepChild < Child + end + end + + describe DataMapper::Associations::ManyToOne::Proxy do + before do + [ ManyToOneSpec::Parent, ManyToOneSpec::Child ].each { |model| model.auto_migrate! 
} + + repository(ADAPTER) do + @parent = ManyToOneSpec::Parent.create(:name => 'parent') + @child = ManyToOneSpec::Child.create(:name => 'child', :parent => @parent) + @other = ManyToOneSpec::Parent.create(:name => 'other parent') + @step_child = ManyToOneSpec::StepChild.create(:name => 'step child', :parent => @other) + @association = @child.parent + end + end + + describe "#association_accessor (STI)" do + include LoggingHelper + + it "should set parent" do + ManyToOneSpec::StepChild.first(:id => @step_child.id).parent.should == @other + end + + it "should use the identity map for STI" do + repository(ADAPTER) do |r| + parent = ManyToOneSpec::Parent.first(:id => @parent.id) + child = ManyToOneSpec::Child.first(:id => @child.id) + step_child = ManyToOneSpec::StepChild.first(:id => @step_child.id) + logger do |log| + # should retrieve from the IdentityMap + child.parent.object_id.should == parent.object_id + + # should retrieve from the datasource + other = step_child.parent + + # should retrieve from the IdentityMap + step_child.parent.should == @other + step_child.parent.object_id.should == other.object_id + + log.readlines.size.should == 1 + end + end + end + end + + describe '#replace' do + it 'should remove the resource from the collection' do + @association.should == @parent + @association.replace(@other) + @association.should == @other + end + + it 'should not automatically save that the resource was removed from the association' do + @association.replace(@other) + @child.reload.parent.should == @parent + end + + it 'should return the association' do + @association.replace(@other).object_id.should == @association.object_id + end + end + + describe '#save' do + describe 'when the parent is nil' do + before do + @association.replace(nil) + end + + it 'should not save the parent' do + @association.save + end + + it 'should return false' do + @association.save.should == false + end + end + + describe 'when the parent is not a new record' do + before do + 
@parent.should_not be_new_record + @child.should_not be_new_record + end + + it 'should not save the parent' do + @parent.should_not_receive(:save) + @association.save + end + + it 'should return true' do + @association.save.should == true + end + + it "should return true to the child" do + @child.save.should == true + end + end + + describe 'when the parent is a new record' do + before do + @parent = ManyToOneSpec::Parent.new(:name => 'unsaved parent') + @parent.should be_new_record + @association.replace(@parent) + end + + it 'should save the parent' do + @association.save + @parent.should_not be_new_record + end + + it 'should return the result of the save' do + @association.save.should == true + end + end + end + + describe '#reload' do + before do + @child.parent_id.should == @parent.id + @association.replace(@other) + end + + it 'should not change the foreign key in the child' do + @child.parent_id.should == @other.id + @association.reload + @child.parent_id.should == @other.id + end + + it 'should return self' do + @association.reload.object_id.should == @association.object_id + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/associations/one_to_many_spec.rb b/vendor/dm-core-0.9.6/spec/integration/associations/one_to_many_spec.rb new file mode 100644 index 0000000..8df039f --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/associations/one_to_many_spec.rb @@ -0,0 +1,188 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) +require 'pp' +describe "OneToMany" do + before(:all) do + class Team + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + property :class_type, Discriminator + + has n, :players + end + + class BaseballTeam < Team + end + + class Player + include DataMapper::Resource + + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + + belongs_to :team + end + 
+ [Team, Player].each { |k| k.auto_migrate!(ADAPTER) } + + Team.create(:name => "Cowboys") + BaseballTeam.create(:name => "Giants") + end + + describe "(saved parent, saved child)" do + before(:each) do + @dc_united = Team.create + @emilio = Player.create(:team => @dc_united) + end + + it "child association should return parent" do + @emilio.team.should == @dc_united + end + + it "parent association should return children" do + @dc_united.players.should == [@emilio] + end + end + + describe "(saved parent, unsaved child)" do + before(:each) do + @dc_united = Team.create + @emilio = Player.new(:team => @dc_united) + end + + it "child association should return parent" do + @emilio.team.should == @dc_united + end + + it "parent association should return children" do + pending("DataMapper does not yet support in-memory associations") do + @dc_united.players.should == [@emilio] + end + end + end + + describe "(unsaved parent, saved child)" do + before(:each) do + @dc_united = Team.new + @emilio = Player.create(:team => @dc_united) + end + + it "child association should return parent" do + @emilio.team.should == @dc_united + end + + it "parent association should return children" do + @dc_united.players.should == [@emilio] + end + + it "should return true to child.save" do + @emilio.should_not be_a_new_record + @emilio.save.should be_true + end + end + + describe "(unsaved parent, unsaved child)" do + before(:each) do + @dc_united = Team.new + @emilio = Player.new(:team => @dc_united) + end + + it "child association should return parent" do + @emilio.team.should == @dc_united + end + + it "parent association should return children" do + pending("DataMapper does not yet support in-memory associations") do + @dc_united.players.should == [@emilio] + end + end + end + + describe "parent initialized child" do + before(:each) do + @ajax = Team.create + @vandesar = @ajax.players.new + @vandesar.save + end + + it "child association should return parent" do + @vandesar.team.should 
== @ajax + end + + it "parent association should return children" do + @ajax.players.should == [@vandesar] + end + end + + it "unsaved parent model should accept array of hashes for association" do + players = [{ :name => "Brett Favre" }, { :name => "Reggie White" }] + + team = Team.new(:name => "Packers", :players => players) + team.players.zip(players) do |player, values| + player.should be_an_instance_of(Player) + values.each { |k, v| player.send(k).should == v } + end + + players = team.players + team.save + + repository(ADAPTER) do + Team.get(3).players.should == players + end + end + + it "saved parent model should accept array of hashes for association" do + players = [{ :name => "Troy Aikman" }, { :name => "Chad Hennings" }] + + team = Team.get(1) + team.players = players + team.players.zip(players) do |player, values| + player.should be_an_instance_of(Player) + values.each { |k, v| player.send(k).should == v } + end + + players = team.players + team.save + + repository(ADAPTER) do + Team.get(1).players.should == players + end + end + + describe "STI" do + before(:all) do + repository(ADAPTER) do + @player = Player.create(:name => "Barry Bonds", :team => BaseballTeam.first) + end + end + + it "should work for child.parent" do + repository(ADAPTER) do + @player.team.should == BaseballTeam.first + end + end + + it "should work for parent.children" do + repository(ADAPTER) do + team = BaseballTeam.first + + team.players.size.should > 0 + team.players.each{|p| p.should be_an_instance_of(Player)} + end + end + end + + describe "alone" do + it "should work for parent.children without any parents in IM" do + repository(ADAPTER) do + team = BaseballTeam.first + team.players.each{|p| p.should be_an_instance_of(Player)} + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/auto_migrations_spec.rb b/vendor/dm-core-0.9.6/spec/integration/auto_migrations_spec.rb new file mode 100755 index 0000000..4002d48 --- /dev/null +++ 
b/vendor/dm-core-0.9.6/spec/integration/auto_migrations_spec.rb @@ -0,0 +1,398 @@ +require 'pathname' +require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper' +require 'ostruct' + +TODAY = Date.today +NOW = DateTime.now + +TIME_STRING_1 = '2007-04-21 04:14:12' +TIME_STRING_2 = '2007-04-21 04:14:12.1' +TIME_STRING_3 = '2007-04-21 04:14:12.01' +TIME_STRING_4 = '2007-04-21 04:14:12.123456' + +TIME_1 = Time.parse(TIME_STRING_1) +TIME_2 = Time.parse(TIME_STRING_2) +TIME_3 = Time.parse(TIME_STRING_3) +TIME_4 = Time.parse(TIME_STRING_4) + +class EveryType + include DataMapper::Resource + + property :serial, Serial + property :fixnum, Integer, :nullable => false, :default => 1 + property :string, String, :nullable => false, :default => 'default' + property :empty, String, :nullable => false, :default => '' + property :date, Date, :nullable => false, :default => TODAY, :index => :date_date_time, :unique_index => :date_float + property :true_class, TrueClass, :nullable => false, :default => true + property :false_class, TrueClass, :nullable => false, :default => false + property :text, DM::Text, :nullable => false, :default => 'text' +# property :class, Class, :nullable => false, :default => Class # FIXME: Class types cause infinite recursions in Resource + property :big_decimal, BigDecimal, :nullable => false, :default => BigDecimal('1.1'), :precision => 2, :scale => 1 + property :float, Float, :nullable => false, :default => 1.1, :precision => 2, :scale => 1, :unique_index => :date_float + property :date_time, DateTime, :nullable => false, :default => NOW, :index => [:date_date_time, true] + property :time_1, Time, :nullable => false, :default => TIME_1, :unique_index => true + property :time_2, Time, :nullable => false, :default => TIME_2 + property :time_3, Time, :nullable => false, :default => TIME_3 + property :time_4, Time, :nullable => false, :default => TIME_4 + property :object, Object, :nullable => true # FIXME: cannot supply a default for Object + 
property :discriminator, DM::Discriminator +end + +module Publications + class ShortStoryCollection + include DataMapper::Resource + property :serial, Serial + property :date, Date, :nullable => false, :default => TODAY, :index => :date_date_time + end +end + +if HAS_SQLITE3 + describe DataMapper::AutoMigrations, '.auto_migrate! with sqlite3' do + before :all do + @adapter = repository(:sqlite3).adapter + + DataMapper::Resource.descendants.clear + + @property_class = Struct.new(:name, :type, :nullable, :default, :serial) + end + + after :all do + DataMapper::Resource.descendants.clear + end + + describe 'with sqlite3' do + before :all do + EveryType.auto_migrate!(:sqlite3).should be_true + + @table_set = @adapter.query('PRAGMA table_info(?)', 'every_types').inject({}) do |ts,column| + default = if 'NULL' == column.dflt_value || column.dflt_value.nil? + nil + else + /^(['"]?)(.*)\1$/.match(column.dflt_value)[2] + end + + property = @property_class.new( + column.name, + column.type.upcase, + column.notnull == 0, + default, + column.pk == 1 # in SQLite3 the serial key is also primary + ) + + ts.update(property.name => property) + end + + @index_list = @adapter.query('PRAGMA index_list(?)', 'every_types') + + # bypass DM to create the record using only the column default values + @adapter.execute('INSERT INTO "every_types" ("serial", "discriminator") VALUES (?, ?)', 1, EveryType) + + @book = repository(:sqlite3) { EveryType.first } + end + + types = { + :serial => [ Integer, 'INTEGER', false, nil, 1, true ], + :fixnum => [ Integer, 'INTEGER', false, '1', 1, false ], + :string => [ String, 'VARCHAR(50)', false, 'default', 'default', false ], + :empty => [ String, 'VARCHAR(50)', false, '', '' , false ], + :date => [ Date, 'DATE', false, TODAY.strftime('%Y-%m-%d'), TODAY, false ], + :true_class => [ TrueClass, 'BOOLEAN', false, 't', true, false ], + :false_class => [ TrueClass, 'BOOLEAN', false, 'f', false, false ], + :text => [ DM::Text, 'TEXT', false, 'text', 'text', 
false ], +# :class => [ Class, 'VARCHAR(50)', false, 'Class', 'Class', false ], + :big_decimal => [ BigDecimal, 'DECIMAL(2,1)', false, '1.1', BigDecimal('1.1'), false ], + :float => [ Float, 'FLOAT(2,1)', false, '1.1', 1.1, false ], + :date_time => [ DateTime, 'DATETIME', false, NOW.strftime('%Y-%m-%d %H:%M:%S'), NOW, false ], + :time_1 => [ Time, 'TIMESTAMP', false, TIME_STRING_1, TIME_1, false ], +#SQLite pads out the microseconds to the full 6 digits no matter what the value is - we simply pad up the zeros needed + :time_2 => [ Time, 'TIMESTAMP', false, TIME_STRING_2.dup << '00000', TIME_2, false ], + :time_3 => [ Time, 'TIMESTAMP', false, TIME_STRING_3.dup << '0000', TIME_3, false ], + :time_4 => [ Time, 'TIMESTAMP', false, TIME_STRING_4, TIME_4, false ], + :object => [ Object, 'TEXT', true, nil, nil, false ], + :discriminator => [ DM::Discriminator, 'VARCHAR(50)', false, nil, EveryType, false ], + } + + types.each do |name,(klass,type,nullable,default,key)| + describe "a #{klass} property" do + it "should be created as a #{type}" do + @table_set[name.to_s].type.should == type + end + + it "should #{!nullable && 'not'} be nullable".squeeze(' ') do + @table_set[name.to_s].nullable.should == nullable + end + + it "should have a default value #{default.inspect}" do + @table_set[name.to_s].default.should == default + end + + expected_value = types[name][4] + it 'should properly typecast value' do + if DateTime == klass + @book.send(name).to_s.should == expected_value.to_s + else + @book.send(name).should == expected_value + end + end + end + end + + it 'should have 4 indexes: 2 non-unique index, 2 unique index' do + @index_list.size.should == 4 + + expected_indices = { + "unique_index_every_types_date_float" => 1, + "unique_index_every_types_time_1" => 1, + "index_every_types_date_date_time" => 0, + "index_every_types_date_time" => 0 + } + + @index_list.each do |index| + expected_indices.should have_key(index.name) + expected_indices[index.name].should == 
index.unique + end + end + + it 'should escape a namespaced model' do + Publications::ShortStoryCollection.auto_migrate!(:sqlite3).should be_true + @adapter.query('SELECT "name" FROM "sqlite_master" WHERE type = ?', 'table').should include('publications_short_story_collections') + end + end + end +end + +if HAS_MYSQL + describe DataMapper::AutoMigrations, '.auto_migrate! with mysql' do + before :all do + @adapter = repository(:mysql).adapter + + DataMapper::Resource.descendants.clear + + @property_class = Struct.new(:name, :type, :nullable, :default, :serial) + end + + after :all do + DataMapper::Resource.descendants.clear + end + + describe 'with mysql' do# + before :all do + EveryType.auto_migrate!(:mysql).should be_true + + @table_set = @adapter.query('DESCRIBE `every_types`').inject({}) do |ts,column| + property = @property_class.new( + column.field, + column.type.upcase, + column.null == 'YES', + column.type.upcase == 'TEXT' ? nil : column.default, + column.extra.split.include?('auto_increment') + ) + + ts.update(property.name => property) + end + + @index_list = @adapter.query('SHOW INDEX FROM `every_types`') + + # bypass DM to create the record using only the column default values + @adapter.execute('INSERT INTO `every_types` (`serial`, `text`, `discriminator`) VALUES (?, ?, ?)', 1, 'text', EveryType) + + @book = repository(:mysql) { EveryType.first } + end + + types = { + :serial => [ Integer, 'INT(11)', false, nil, 1, true ], + :fixnum => [ Integer, 'INT(11)', false, '1', 1, false ], + :string => [ String, 'VARCHAR(50)', false, 'default', 'default', false ], + :empty => [ String, 'VARCHAR(50)', false, '', '', false ], + :date => [ Date, 'DATE', false, TODAY.strftime('%Y-%m-%d'), TODAY, false ], + :true_class => [ TrueClass, 'TINYINT(1)', false, '1', true, false ], + :false_class => [ TrueClass, 'TINYINT(1)', false, '0', false, false ], + :text => [ DM::Text, 'TEXT', false, nil, 'text', false ], +# :class => [ Class, 'VARCHAR(50)', false, 'Class', 'Class', 
false ], + :big_decimal => [ BigDecimal, 'DECIMAL(2,1)', false, '1.1', BigDecimal('1.1'), false ], + :float => [ Float, 'FLOAT(2,1)', false, '1.1', 1.1, false ], + :date_time => [ DateTime, 'DATETIME', false, NOW.strftime('%Y-%m-%d %H:%M:%S'), NOW, false ], + :time_1 => [ Time, 'TIMESTAMP', false, TIME_1.strftime('%Y-%m-%d %H:%M:%S'), TIME_1, false ], + :time_2 => [ Time, 'TIMESTAMP', false, TIME_2.strftime('%Y-%m-%d %H:%M:%S'), TIME_2, false ], + :time_3 => [ Time, 'TIMESTAMP', false, TIME_3.strftime('%Y-%m-%d %H:%M:%S'), TIME_3 , false ], + :time_4 => [ Time, 'TIMESTAMP', false, TIME_4.strftime('%Y-%m-%d %H:%M:%S'), TIME_4 , false ], + :object => [ Object, 'TEXT', true, nil, nil, false ], + :discriminator => [ DM::Discriminator, 'VARCHAR(50)', false, nil, EveryType, false ], + } + + types.each do |name,(klass,type,nullable,default,key)| + describe "a #{klass} property" do + it "should be created as a #{type}" do + @table_set[name.to_s].type.should == type + end + + it "should #{!nullable && 'not'} be nullable".squeeze(' ') do + @table_set[name.to_s].nullable.should == nullable + end + + it "should have a default value #{default.inspect}" do + @table_set[name.to_s].default.should == default + end + + expected_value = types[name][4] + it 'should properly typecast value' do + if DateTime == klass || Time == klass # mysql doesn't support microsecond + @book.send(name).to_s.should == expected_value.to_s + else + @book.send(name).should == expected_value + end + end + end + end + + it 'should have 4 indexes: 2 non-unique index, 2 unique index' do + pending do + # TODO + @index_list[0].Key_name.should == 'unique_index_every_types_date_float' + @index_list[0].Non_unique.should == 0 + @index_list[1].Key_name.should == 'unique_index_every_types_time_1' + @index_list[1].Non_unique.should == 0 + @index_list[2].Key_name.should == 'index_every_types_date_date_time' + @index_list[2].Non_unique.should == 1 + @index_list[3].Key_name.should == 'index_every_types_date_time' + 
@index_list[3].Non_unique.should == 1 + end + end + + it 'should escape a namespaced model' do + Publications::ShortStoryCollection.auto_migrate!(:mysql).should be_true + @adapter.query('SHOW TABLES').should include('publications_short_story_collections') + end + end + end +end + +if HAS_POSTGRES + describe DataMapper::AutoMigrations, '.auto_migrate! with postgres' do + before :all do + @adapter = repository(:postgres).adapter + + DataMapper::Resource.descendants.clear + + @property_class = Struct.new(:name, :type, :nullable, :default, :serial) + end + + after :all do + DataMapper::Resource.descendants.clear + end + + describe 'with postgres' do + before :all do + EveryType.auto_migrate!(:postgres).should be_true + + query = <<-EOS + SELECT + -- Field + "pg_attribute"."attname" AS "Field", + -- Type + CASE "pg_type"."typname" + WHEN 'varchar' THEN 'varchar' + ELSE "pg_type"."typname" + END AS "Type", + -- Null + CASE WHEN "pg_attribute"."attnotnull" THEN '' + ELSE 'YES' + END AS "Null", + -- Default + "pg_attrdef"."adsrc" AS "Default" + FROM "pg_class" + INNER JOIN "pg_attribute" + ON ("pg_class"."oid" = "pg_attribute"."attrelid") + INNER JOIN pg_type + ON ("pg_attribute"."atttypid" = "pg_type"."oid") + LEFT JOIN "pg_attrdef" + ON ("pg_class"."oid" = "pg_attrdef"."adrelid" AND "pg_attribute"."attnum" = "pg_attrdef"."adnum") + WHERE "pg_class"."relname" = ? AND "pg_attribute"."attnum" >= ? 
AND NOT "pg_attribute"."attisdropped" + ORDER BY "pg_attribute"."attnum" + EOS + + @table_set = @adapter.query(query, 'every_types', 1).inject({}) do |ts,column| + default = column.default + serial = false + + if column.default == "nextval('every_types_serial_seq'::regclass)" + default = nil + serial = true + end + + property = @property_class.new( + column.field, + column.type.upcase, + column.null == 'YES', + default, + serial + ) + + ts.update(property.name => property) + end + + # bypass DM to create the record using only the column default values + @adapter.execute('INSERT INTO "every_types" ("serial", "discriminator") VALUES (?, ?)', 1, EveryType) + + @book = repository(:postgres) { EveryType.first } + end + + types = { + :serial => [ Integer, 'INT4', false, nil, 1, true ], + :fixnum => [ Integer, 'INT4', false, '1', 1, false ], + :string => [ String, 'VARCHAR', false, "'default'::character varying", 'default', false ], + :empty => [ String, 'VARCHAR', false, "''::character varying", '', false ], + :date => [ Date, 'DATE', false, "'#{TODAY.strftime('%Y-%m-%d')}'::date", TODAY, false ], + :true_class => [ TrueClass, 'BOOL', false, 'true', true, false ], + :false_class => [ TrueClass, 'BOOL', false, 'false', false, false ], + :text => [ DM::Text, 'TEXT', false, "'text'::text", 'text', false ], +# :class => [ Class, 'VARCHAR(50)', false, 'Class', 'Class', false ], + :big_decimal => [ BigDecimal, 'NUMERIC', false, '1.1', BigDecimal('1.1'), false ], + :float => [ Float, 'FLOAT8', false, '1.1', 1.1, false ], + :date_time => [ DateTime, 'TIMESTAMP', false, "'#{NOW.strftime('%Y-%m-%d %H:%M:%S')}'::timestamp without time zone", NOW, false ], + :time_1 => [ Time, 'TIMESTAMP', false, "'" << TIME_STRING_1.dup << "'::timestamp without time zone", TIME_1, false ], +#The weird zero here is simply because postgresql seems to want to store .10 instead of .1 for this one +#affects anything with an exact tenth of a second (i.e. .1, .2, .3, ...) 
+ :time_2 => [ Time, 'TIMESTAMP', false, "'" << TIME_STRING_2.dup << "0'::timestamp without time zone", TIME_2, false ], + :time_3 => [ Time, 'TIMESTAMP', false, "'" << TIME_STRING_3.dup << "'::timestamp without time zone", TIME_3, false ], + :time_4 => [ Time, 'TIMESTAMP', false, "'" << TIME_STRING_4.dup << "'::timestamp without time zone", TIME_4, false ], + :object => [ Object, 'TEXT', true, nil, nil, false ], + :discriminator => [ DM::Discriminator, 'VARCHAR', false, nil, EveryType, false ], + } + + types.each do |name,(klass,type,nullable,default,key)| + describe "a #{Extlib::Inflection.classify(name.to_s)} property" do + it "should be created as a #{type}" do + @table_set[name.to_s].type.should == type + end + + it "should #{!nullable && 'not'} be nullable".squeeze(' ') do + @table_set[name.to_s].nullable.should == nullable + end + + it "should have a default value #{default.inspect}" do + @table_set[name.to_s].default.should == default + end + + expected_value = types[name][4] + it 'should properly typecast value' do + if DateTime == klass + @book.send(name).to_s.should == expected_value.to_s + else + @book.send(name).should == expected_value + end + end + end + end + + it 'should have 4 indexes: 2 non-unique index, 2 unique index' do + pending 'TODO' + end + + it 'should escape a namespaced model' do + Publications::ShortStoryCollection.auto_migrate!(:postgres).should be_true + @adapter.query('SELECT "tablename" FROM "pg_tables" WHERE "tablename" NOT LIKE ?', 'pg_%').should include('publications_short_story_collections') + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/collection_spec.rb b/vendor/dm-core-0.9.6/spec/integration/collection_spec.rb new file mode 100644 index 0000000..6e9ddf6 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/collection_spec.rb @@ -0,0 +1,1069 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if ADAPTER + class Zebra + include DataMapper::Resource + + def 
self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + property :age, Integer + property :notes, Text + + has n, :stripes + end + + class Stripe + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + property :age, Integer + property :zebra_id, Integer + + belongs_to :zebra + + def self.sort_by_name + all(:order => [ :name ]) + end + end + + class CollectionSpecParty + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :name, String, :key => true + property :type, Discriminator + end + + class CollectionSpecUser < CollectionSpecParty + def self.default_repository_name + ADAPTER + end + + property :username, String + property :password, String + end + + module CollectionSpecHelper + def setup + Zebra.auto_migrate!(ADAPTER) + Stripe.auto_migrate!(ADAPTER) + + repository(ADAPTER) do + @nancy = Zebra.create(:name => 'Nancy', :age => 11, :notes => 'Spotted!') + @bessie = Zebra.create(:name => 'Bessie', :age => 10, :notes => 'Striped!') + @steve = Zebra.create(:name => 'Steve', :age => 8, :notes => 'Bald!') + + @babe = Stripe.create(:name => 'Babe') + @snowball = Stripe.create(:name => 'snowball') + + @nancy.stripes << @babe + @nancy.stripes << @snowball + @nancy.save + end + end + end + + describe DataMapper::Collection do + include CollectionSpecHelper + + before do + setup + end + + before do + @repository = repository(ADAPTER) + @model = Zebra + @query = DataMapper::Query.new(@repository, @model, :order => [ :id ]) + @collection = @repository.read_many(@query) + @other = @repository.read_many(@query.merge(:limit => 2)) + end + + it "should return the correct repository" do + repository = repository(:legacy) + query = DataMapper::Query.new(repository, @model) + DataMapper::Collection.new(query){}.repository.object_id.should == repository.object_id + end + + it "should be able to add arbitrary objects" do 
+ properties = @model.properties(:default) + + collection = DataMapper::Collection.new(@query) do |c| + c.load([ 4, 'Bob', 10 ]) + c.load([ 5, 'Nancy', 11 ]) + end + + collection.should respond_to(:reload) + + results = collection.entries + results.should have(2).entries + + results.each do |cow| + cow.attribute_loaded?(:name).should == true + cow.attribute_loaded?(:age).should == true + end + + bob, nancy = results[0], results[1] + + bob.name.should eql('Bob') + bob.age.should eql(10) + bob.should_not be_a_new_record + + nancy.name.should eql('Nancy') + nancy.age.should eql(11) + nancy.should_not be_a_new_record + + results.first.should == bob + end + + describe 'model proxying' do + it 'should delegate to a model method' do + stripes = @model.first.stripes + stripes.should respond_to(:sort_by_name) + stripes.sort_by_name.should == [ @babe, @snowball ] + end + end + + describe 'association proxying' do + it "should provide a Query" do + repository(ADAPTER) do + zebras = Zebra.all(:order => [ :name ]) + zebras.query.order.should == [DataMapper::Query::Direction.new(Zebra.properties(ADAPTER)[:name])] + end + end + + it "should proxy the relationships of the model" do + repository(ADAPTER) do + zebras = Zebra.all + zebras.should have(3).entries + zebras.find { |zebra| zebra.name == 'Nancy' }.stripes.should have(2).entries + zebras.should respond_to(:stripes) + zebras.stripes.should == [@babe, @snowball] + end + end + + it "should preserve it's order on reload" do + repository(ADAPTER) do |r| + zebras = Zebra.all(:order => [ :name ]) + + order = %w{ Bessie Nancy Steve } + + zebras.map { |z| z.name }.should == order + + # Force a lazy-load call: + zebras.first.notes + + # The order should be unaffected. 
+ zebras.map { |z| z.name }.should == order + end + end + end + + describe '.new' do + describe 'with non-index keys' do + it 'should instantiate read-only resources' do + @collection = DataMapper::Collection.new(DataMapper::Query.new(@repository, @model, :fields => [ :age ])) do |c| + c.load([ 1 ]) + end + + @collection.size.should == 1 + + resource = @collection.entries[0] + + resource.should be_kind_of(@model) + resource.collection.object_id.should == @collection.object_id + resource.should_not be_new_record + resource.should be_readonly + resource.age.should == 1 + end + end + + describe 'with inheritance property' do + before do + CollectionSpecUser.auto_migrate! + CollectionSpecUser.create(:name => 'John') + + properties = CollectionSpecParty.properties(:default) + end + + it 'should instantiate resources using the inheritance property class' do + query = DataMapper::Query.new(@repository, CollectionSpecParty) + collection = @repository.read_many(query) + collection.should have(1).entries + collection.first.model.should == CollectionSpecUser + end + end + end + + [ true, false ].each do |loaded| + describe " (#{loaded ? 
'' : 'not '}loaded) " do + if loaded + before do + @collection.to_a + end + end + + describe '#<<' do + it 'should relate each new resource to the collection' do + # resource is orphaned + @nancy.collection.object_id.should_not == @collection.object_id + + @collection << @nancy + + # resource is related + @nancy.collection.object_id.should == @collection.object_id + end + + it 'should return self' do + @collection.<<(@steve).object_id.should == @collection.object_id + end + end + + describe '#all' do + describe 'with no arguments' do + it 'should return self' do + @collection.all.object_id.should == @collection.object_id + end + end + + describe 'with query arguments' do + describe 'should return a Collection' do + before do + @query.update(:offset => 10, :limit => 10) + query = DataMapper::Query.new(@repository, @model) + @unlimited = DataMapper::Collection.new(query) {} + end + + it 'has an offset equal to 10' do + @collection.all.query.offset.should == 10 + end + + it 'has a cumulative offset equal to 11 when passed an offset of 1' do + @collection.all(:offset => 1).query.offset.should == 11 + end + + it 'has a cumulative offset equal to 19 when passed an offset of 9' do + @collection.all(:offset => 9).query.offset.should == 19 + end + + it 'is empty when passed an offset that is out of range' do + pending do + empty_collection = @collection.all(:offset => 10) + empty_collection.should == [] + empty_collection.should be_loaded + end + end + + it 'has an limit equal to 10' do + @collection.all.query.limit.should == 10 + end + + it 'has a limit equal to 5' do + @collection.all(:limit => 5).query.limit.should == 5 + end + + it 'has a limit equal to 10 if passed a limit greater than 10' do + @collection.all(:limit => 11).query.limit.should == 10 + end + + it 'has no limit' do + @unlimited.all.query.limit.should be_nil + end + + it 'has a limit equal to 1000 when passed a limit of 1000' do + @unlimited.all(:limit => 1000).query.limit.should == 1000 + end + end + end 
+ end + + describe '#at' do + it 'should return a Resource' do + resource_at = @collection.at(1) + resource_at.should be_kind_of(DataMapper::Resource) + resource_at.id.should == @bessie.id + end + + it 'should return a Resource when using a negative index' do + resource_at = @collection.at(-1) + resource_at.should be_kind_of(DataMapper::Resource) + resource_at.id.should == @steve.id + end + end + + describe '#build' do + it 'should build a new resource' do + resource = @collection.build(:name => 'John') + resource.should be_kind_of(@model) + resource.should be_new_record + end + + it 'should append the new resource to the collection' do + resource = @collection.build(:name => 'John') + resource.should be_new_record + resource.collection.object_id.should == @collection.object_id + @collection.should include(resource) + end + + it 'should use the query conditions to set default values' do + resource = @collection.build + resource.should be_new_record + resource.name.should be_nil + + @collection.query.update(:name => 'John') + + resource = @collection.build + resource.name.should == 'John' + end + end + + describe '#clear' do + it 'should orphan the resource from the collection' do + entries = @collection.entries + + # resources are related + entries.each { |r| r.collection.object_id.should == @collection.object_id } + + @collection.should have(3).entries + @collection.clear + @collection.should be_empty + + # resources are orphaned + entries.each { |r| r.collection.object_id.should_not == @collection.object_id } + end + + it 'should return self' do + @collection.clear.object_id.should == @collection.object_id + end + end + + describe '#collect!' do + it 'should return self' do + @collection.collect! 
{ |resource| resource }.object_id.should == @collection.object_id + end + end + + describe '#concat' do + it 'should return self' do + @collection.concat(@other).object_id.should == @collection.object_id + end + end + + describe '#create' do + it 'should create a new resource' do + resource = @collection.create(:name => 'John') + resource.should be_kind_of(@model) + resource.should_not be_new_record + end + + it 'should append the new resource to the collection' do + resource = @collection.create(:name => 'John') + resource.should_not be_new_record + resource.collection.object_id.should == @collection.object_id + @collection.should include(resource) + end + + it 'should not append the resource if it was not saved' do + @repository.should_receive(:create).and_return(false) + Zebra.should_receive(:repository).at_least(:once).and_return(@repository) + + resource = @collection.create(:name => 'John') + resource.should be_new_record + + resource.collection.object_id.should_not == @collection.object_id + @collection.should_not include(resource) + end + + it 'should use the query conditions to set default values' do + resource = @collection.create + resource.should_not be_new_record + resource.name.should be_nil + + @collection.query.update(:name => 'John') + + resource = @collection.create + resource.name.should == 'John' + end + end + + describe '#delete' do + it 'should orphan the resource from the collection' do + collection = @nancy.collection + + # resource is related + @nancy.collection.object_id.should == collection.object_id + + collection.should have(1).entries + collection.delete(@nancy) + collection.should be_empty + + # resource is orphaned + @nancy.collection.object_id.should_not == collection.object_id + end + + it 'should return a Resource' do + collection = @nancy.collection + + resource = collection.delete(@nancy) + + resource.should be_kind_of(DataMapper::Resource) + resource.object_id.should == @nancy.object_id + end + end + + describe '#delete_at' do 
+ it 'should orphan the resource from the collection' do + collection = @nancy.collection + + # resource is related + @nancy.collection.object_id.should == collection.object_id + + collection.should have(1).entries + collection.delete_at(0).object_id.should == @nancy.object_id + collection.should be_empty + + # resource is orphaned + @nancy.collection.object_id.should_not == collection.object_id + end + + it 'should return a Resource' do + collection = @nancy.collection + + resource = collection.delete_at(0) + + resource.should be_kind_of(DataMapper::Resource) + resource.object_id.should == @nancy.object_id + end + end + + describe '#destroy!' do + before do + @ids = [ @nancy.id, @bessie.id, @steve.id ] + end + + it 'should destroy the resources in the collection' do + @collection.map { |r| r.id }.should == @ids + @collection.destroy!.should == true + @model.all(:id => @ids).should == [] + @collection.reload.should == [] + end + + it 'should clear the collection' do + @collection.map { |r| r.id }.should == @ids + @collection.destroy!.should == true + @collection.should == [] + end + end + + describe '#each' do + it 'should return self' do + @collection.each { |resource| }.object_id.should == @collection.object_id + end + end + + describe '#each_index' do + it 'should return self' do + @collection.each_index { |resource| }.object_id.should == @collection.object_id + end + end + + describe '#eql?' 
do + it 'should return true if for the same collection' do + @collection.object_id.should == @collection.object_id + @collection.should be_eql(@collection) + end + + it 'should return true for duplicate collections' do + dup = @collection.dup + dup.should be_kind_of(DataMapper::Collection) + dup.object_id.should_not == @collection.object_id + dup.entries.should == @collection.entries + dup.should be_eql(@collection) + end + + it 'should return false for different collections' do + @collection.should_not be_eql(@other) + end + end + + describe '#fetch' do + it 'should return a Resource' do + @collection.fetch(0).should be_kind_of(DataMapper::Resource) + end + end + + describe '#first' do + describe 'with no arguments' do + it 'should return a Resource' do + first = @collection.first + first.should_not be_nil + first.should be_kind_of(DataMapper::Resource) + first.id.should == @nancy.id + end + end + + describe 'with limit specified' do + it 'should return a Collection' do + collection = @collection.first(2) + + collection.should be_kind_of(DataMapper::Collection) + collection.object_id.should_not == @collection.object_id + + collection.query.order.size.should == 1 + collection.query.order.first.property.should == @model.properties[:id] + collection.query.order.first.direction.should == :asc + + collection.query.offset.should == 0 + collection.query.limit.should == 2 + + collection.length.should == 2 + + collection.entries.map { |r| r.id }.should == [ @nancy.id, @bessie.id ] + end + + it 'should return a Collection if limit is 1' do + collection = @collection.first(1) + + collection.should be_kind_of(DataMapper::Collection) + collection.object_id.should_not == @collection.object_id + end + end + end + + describe '#freeze' do + it 'should freeze the underlying array' do + @collection.should_not be_frozen + @collection.freeze + @collection.should be_frozen + end + end + + describe '#get' do + it 'should find a resource in a collection by key' do + resource = 
@collection.get(*@nancy.key) + resource.should be_kind_of(DataMapper::Resource) + resource.id.should == @nancy.id + end + + it "should find a resource in a collection by typecasting the key" do + resource = @collection.get(@nancy.key.to_s) + resource.should be_kind_of(DataMapper::Resource) + resource.id.should == @nancy.id + end + + it 'should not find a resource not in the collection' do + @query.update(:offset => 0, :limit => 3) + @david = Zebra.create(:name => 'David', :age => 15, :notes => 'Albino') + @collection.get(@david.key).should be_nil + end + end + + describe '#get!' do + it 'should find a resource in a collection by key' do + resource = @collection.get!(*@nancy.key) + resource.should be_kind_of(DataMapper::Resource) + resource.id.should == @nancy.id + end + + it 'should raise an exception if the resource is not found' do + @query.update(:offset => 0, :limit => 3) + @david = Zebra.create(:name => 'David', :age => 15, :notes => 'Albino') + lambda { + @collection.get!(@david.key) + }.should raise_error(DataMapper::ObjectNotFoundError) + end + end + + describe '#insert' do + it 'should return self' do + @collection.insert(1, @steve).object_id.should == @collection.object_id + end + end + + describe '#last' do + describe 'with no arguments' do + it 'should return a Resource' do + last = @collection.last + last.should_not be_nil + last.should be_kind_of(DataMapper::Resource) + last.id.should == @steve.id + end + end + + describe 'with limit specified' do + it 'should return a Collection' do + collection = @collection.last(2) + + collection.should be_kind_of(DataMapper::Collection) + collection.object_id.should_not == @collection.object_id + + collection.query.order.size.should == 1 + collection.query.order.first.property.should == @model.properties[:id] + collection.query.order.first.direction.should == :desc + + collection.query.offset.should == 0 + collection.query.limit.should == 2 + + collection.length.should == 2 + + collection.entries.map { |r| r.id 
}.should == [ @bessie.id, @steve.id ] + end + + it 'should return a Collection if limit is 1' do + collection = @collection.last(1) + + collection.class.should == DataMapper::Collection # should be_kind_of(DataMapper::Collection) + collection.object_id.should_not == @collection.object_id + end + end + end + + describe '#load' do + it 'should load resources from the identity map when possible' do + @steve.collection = nil + @repository.identity_map(@model).should_receive(:get).with([ @steve.id ]).and_return(@steve) + + collection = @repository.read_many(@query.merge(:id => @steve.id)) + + collection.size.should == 1 + collection.map { |r| r.object_id }.should == [ @steve.object_id ] + + @steve.collection.object_id.should == collection.object_id + end + + it 'should return a Resource' do + @collection.load([ @steve.id, @steve.name, @steve.age ]).should be_kind_of(DataMapper::Resource) + end + end + + describe '#loaded?' do + if loaded + it 'should return true for an initialized collection' do + @collection.should be_loaded + end + else + it 'should return false for an uninitialized collection' do + @collection.should_not be_loaded + @collection.to_a # load collection + @collection.should be_loaded + end + end + end + + describe '#pop' do + it 'should orphan the resource from the collection' do + collection = @steve.collection + + # resource is related + @steve.collection.object_id.should == collection.object_id + + collection.should have(1).entries + collection.pop.object_id.should == @steve.object_id + collection.should be_empty + + # resource is orphaned + @steve.collection.object_id.should_not == collection.object_id + end + + it 'should return a Resource' do + @collection.pop.key.should == @steve.key + end + end + + describe '#properties' do + it 'should return a PropertySet' do + @collection.properties.should be_kind_of(DataMapper::PropertySet) + end + + it 'should contain same properties as query.fields' do + properties = @collection.properties + 
properties.entries.should == @collection.query.fields + end + end + + describe '#push' do + it 'should relate each new resource to the collection' do + # resource is orphaned + @nancy.collection.object_id.should_not == @collection.object_id + + @collection.push(@nancy) + + # resource is related + @nancy.collection.object_id.should == @collection.object_id + end + + it 'should return self' do + @collection.push(@steve).object_id.should == @collection.object_id + end + end + + describe '#relationships' do + it 'should return a Hash' do + @collection.relationships.should be_kind_of(Hash) + end + + it 'should contain same properties as query.model.relationships' do + relationships = @collection.relationships + relationships.should == @collection.query.model.relationships + end + end + + describe '#reject' do + it 'should return a Collection with resources that did not match the block' do + rejected = @collection.reject { |resource| false } + rejected.class.should == Array + rejected.should == [ @nancy, @bessie, @steve ] + end + + it 'should return an empty Array if resources matched the block' do + rejected = @collection.reject { |resource| true } + rejected.class.should == Array + rejected.should == [] + end + end + + describe '#reject!' do + it 'should return self if resources matched the block' do + @collection.reject! { |resource| true }.object_id.should == @collection.object_id + end + + it 'should return nil if no resources matched the block' do + @collection.reject! 
{ |resource| false }.should be_nil + end + end + + describe '#reload' do + it 'should return self' do + @collection.reload.object_id.should == @collection.object_id + end + + it 'should replace the collection' do + original = @collection.dup + @collection.reload.should == @collection + @collection.should == original + end + + it 'should reload lazily initialized fields' do + pending 'Move to unit specs' + + @repository.should_receive(:all) do |model,query| + model.should == @model + + query.should be_instance_of(DataMapper::Query) + query.reload.should == true + query.offset.should == 0 + query.limit.should == 10 + query.order.should == [] + query.fields.should == @model.properties.defaults + query.links.should == [] + query.includes.should == [] + query.conditions.should == [ [ :eql, @model.properties[:id], [ 1, 2, 3 ] ] ] + + @collection + end + + @collection.reload + end + end + + describe '#replace' do + it "should orphan each existing resource from the collection if loaded?" do + entries = @collection.entries + + # resources are related + entries.each { |r| r.collection.object_id.should == @collection.object_id } + + @collection.should have(3).entries + @collection.replace([]).object_id.should == @collection.object_id + @collection.should be_empty + + # resources are orphaned + entries.each { |r| r.collection.object_id.should_not == @collection.object_id } + end + + it 'should relate each new resource to the collection' do + # resource is orphaned + @nancy.collection.object_id.should_not == @collection.object_id + + @collection.replace([ @nancy ]) + + # resource is related + @nancy.collection.object_id.should == @collection.object_id + end + + it 'should replace the contents of the collection' do + other = [ @nancy ] + @collection.should_not == other + @collection.replace(other) + @collection.should == other + @collection.object_id.should_not == @other.object_id + end + end + + describe '#reverse' do + [ true, false ].each do |loaded| + describe "on a 
collection where loaded? == #{loaded}" do + before do + @collection.to_a if loaded + end + + it 'should return a Collection with reversed entries' do + reversed = @collection.reverse + reversed.should be_kind_of(DataMapper::Collection) + reversed.object_id.should_not == @collection.object_id + reversed.entries.should == @collection.entries.reverse + + reversed.query.order.size.should == 1 + reversed.query.order.first.property.should == @model.properties[:id] + reversed.query.order.first.direction.should == :desc + end + end + end + end + + describe '#reverse!' do + it 'should return self' do + @collection.reverse!.object_id.should == @collection.object_id + end + end + + describe '#reverse_each' do + it 'should return self' do + @collection.reverse_each { |resource| }.object_id.should == @collection.object_id + end + end + + describe '#select' do + it 'should return an Array with resources that matched the block' do + selected = @collection.select { |resource| true } + selected.class.should == Array + selected.should == @collection + end + + it 'should return an empty Array if no resources matched the block' do + selected = @collection.select { |resource| false } + selected.class.should == Array + selected.should == [] + end + end + + describe '#shift' do + it 'should orphan the resource from the collection' do + collection = @nancy.collection + + # resource is related + @nancy.collection.object_id.should == collection.object_id + + collection.should have(1).entries + collection.shift.object_id.should == @nancy.object_id + collection.should be_empty + + # resource is orphaned + @nancy.collection.object_id.should_not == collection.object_id + end + + it 'should return a Resource' do + @collection.shift.key.should == @nancy.key + end + end + + [ :slice, :[] ].each do |method| + describe '#slice' do + describe 'with an index' do + it 'should return a Resource' do + resource = @collection.send(method, 0) + resource.should be_kind_of(DataMapper::Resource) + 
resource.id.should == @nancy.id + end + end + + describe 'with a start and length' do + it 'should return a Collection' do + sliced = @collection.send(method, 0, 1) + sliced.should be_kind_of(DataMapper::Collection) + sliced.object_id.should_not == @collection.object_id + sliced.length.should == 1 + sliced.map { |r| r.id }.should == [ @nancy.id ] + end + end + + describe 'with a Range' do + it 'should return a Collection' do + sliced = @collection.send(method, 0..1) + sliced.should be_kind_of(DataMapper::Collection) + sliced.object_id.should_not == @collection.object_id + sliced.length.should == 2 + sliced.map { |r| r.id }.should == [ @nancy.id, @bessie.id ] + end + end + end + end + + describe '#slice!' do + describe 'with an index' do + it 'should return a Resource' do + resource = @collection.slice!(0) + resource.should be_kind_of(DataMapper::Resource) + end + end + + describe 'with a start and length' do + it 'should return an Array' do + sliced = @collection.slice!(0, 1) + sliced.class.should == Array + sliced.map { |r| r.id }.should == [ @nancy.id ] + end + end + + describe 'with a Range' do + it 'should return a Collection' do + sliced = @collection.slice(0..1) + sliced.should be_kind_of(DataMapper::Collection) + sliced.object_id.should_not == @collection.object_id + sliced.length.should == 2 + sliced[0].id.should == @nancy.id + sliced[1].id.should == @bessie.id + end + end + end + + describe '#sort' do + it 'should return an Array' do + sorted = @collection.sort { |a,b| a.age <=> b.age } + sorted.class.should == Array + end + end + + describe '#sort!' do + it 'should return self' do + @collection.sort! 
{ |a,b| 0 }.object_id.should == @collection.object_id + end + end + + describe '#unshift' do + it 'should relate each new resource to the collection' do + # resource is orphaned + @nancy.collection.object_id.should_not == @collection.object_id + + @collection.unshift(@nancy) + + # resource is related + @nancy.collection.object_id.should == @collection.object_id + end + + it 'should return self' do + @collection.unshift(@steve).object_id.should == @collection.object_id + end + end + + describe '#update!' do + it 'should update the resources in the collection' do + pending do + # this will not pass with new update! + # update! should never loop through and set attributes + # even if it is loaded, and it will not reload the + # changed objects (even with reload=true, as objects + # are created is not in any identity map) + names = [ @nancy.name, @bessie.name, @steve.name ] + @collection.map { |r| r.name }.should == names + @collection.update!(:name => 'John') + @collection.map { |r| r.name }.should_not == names + @collection.map { |r| r.name }.should == %w[ John ] * 3 + end + end + + it 'should not update loaded resources unless forced' do + repository(ADAPTER) do + nancy = Zebra.first + nancy.name.should == "Nancy" + + collection = Zebra.all(:name => ["Nancy","Bessie"]) + collection.update!(:name => "Stevie") + + nancy.name.should == "Nancy" + end + end + + it 'should update loaded resources if forced' do + repository(ADAPTER) do + nancy = Zebra.first + nancy.name.should == "Nancy" + + collection = Zebra.all(:name => ["Nancy","Bessie"]) + collection.update!({:name => "Stevie"},true) + + nancy.name.should == "Stevie" + end + end + + it 'should update collection-query when updating' do + repository(ADAPTER) do + collection = Zebra.all(:name => ["Nancy","Bessie"]) + collection.query.conditions.first[2].should == ["Nancy","Bessie"] + collection.length.should == 2 + collection.update!(:name => "Stevie") + collection.length.should == 2 + 
collection.query.conditions.first[2].should == "Stevie" + end + end + end + + describe '#keys' do + it 'should return a hash of keys' do + keys = @collection.send(:keys) + keys.length.should == 1 + keys.each{|property,values| values.should == [1,2,3]} + end + + it 'should return an empty hash if collection is empty' do + keys = Zebra.all(:id.gt => 10000).send(:keys) + keys.should == {} + end + end + + describe '#values_at' do + it 'should return an Array' do + values = @collection.values_at(0) + values.class.should == Array + end + + it 'should return an Array of the resources at the index' do + @collection.values_at(0).entries.map { |r| r.id }.should == [ @nancy.id ] + end + end + + describe 'with lazy loading' do + it "should take a materialization block" do + collection = DataMapper::Collection.new(@query) do |c| + c.should == [] + c.load([ 1, 'Bob', 10 ]) + c.load([ 2, 'Nancy', 11 ]) + end + + collection.should_not be_loaded + collection.length.should == 2 + collection.should be_loaded + end + + it "should load lazy columns when using offset" do + repository(ADAPTER) do + zebras = Zebra.all(:offset => 1, :limit => 2) + zebras.first.notes.should_not be_nil + end + end + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/data_objects_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/integration/data_objects_adapter_spec.rb new file mode 100644 index 0000000..156d99c --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/data_objects_adapter_spec.rb @@ -0,0 +1,32 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if [ HAS_SQLITE3, HAS_MYSQL, HAS_POSTGRES ].include?(ADAPTER) + describe DataMapper::Adapters::DataObjectsAdapter, "with #{ADAPTER}" do + describe 'a connection' do + before do + @adapter = DataMapper::Repository.adapters[ADAPTER] + @transaction = DataMapper::Transaction.new(@adapter) + + @command = mock('command', :execute_non_query => nil) + @connection = mock('connection', :create_command => 
@command) + DataObjects::Connection.stub!(:new).and_return(@connection) + end + + it 'should close automatically when no longer needed' do + @connection.should_receive(:close) + @adapter.execute('SELECT 1') + end + + it 'should not close when a current transaction is active' do + @connection.should_receive(:create_command).with('SELECT 1').twice.and_return(@command) + @connection.should_not_receive(:close) + + @transaction.begin + @transaction.within do + @adapter.execute('SELECT 1') + @adapter.execute('SELECT 1') + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/dependency_queue_spec.rb b/vendor/dm-core-0.9.6/spec/integration/dependency_queue_spec.rb new file mode 100644 index 0000000..74dbdd1 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/dependency_queue_spec.rb @@ -0,0 +1,46 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "DataMapper::DependencyQueue" do + before :each do + @q = DataMapper::DependencyQueue.new + @dependencies = @q.instance_variable_get("@dependencies") + end + + describe "#add" do + it "should store the supplied callback in @dependencies" do + @q.add('MissingConstant') { true } + @dependencies['MissingConstant'].first.call.should == true + end + end + + describe "#resolve!" do + describe "(when dependency is not defined)" do + it "should not alter @dependencies" do + @q.add('MissingConstant') { true } + old_dependencies = @dependencies.dup + @q.resolve! + old_dependencies.should == @dependencies + end + end + + describe "(when dependency is defined)" do + before :each do + @q.add('MissingConstant') { |klass| klass.instance_variable_set("@resolved", true) } # add before MissingConstant is loaded + + class MissingConstant + end + end + + it "should execute stored callbacks" do + @q.resolve! + MissingConstant.instance_variable_get("@resolved").should == true + end + + it "should clear @dependencies" do + @q.resolve! 
+ @dependencies['MissingConstant'].should be_empty + end + end + end + +end diff --git a/vendor/dm-core-0.9.6/spec/integration/model_spec.rb b/vendor/dm-core-0.9.6/spec/integration/model_spec.rb new file mode 100644 index 0000000..2fcb489 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/model_spec.rb @@ -0,0 +1,127 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if ADAPTER + module ModelSpec + class STI + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + property :type, Discriminator + end + + class STIDescendant < STI + end + end + + describe "DataMapper::Model with #{ADAPTER}" do + before do + repository(ADAPTER) do + ModelSpec::STI.auto_migrate! + end + + @planet = DataMapper::Model.new('planet') do + def self.default_repository_name; ADAPTER end + property :name, String, :key => true + property :distance, Integer + end + + @moon = DataMapper::Model.new('moon') do + def self.default_repository_name; ADAPTER end + property :id, DM::Serial + property :name, String + end + + @planet.auto_migrate!(ADAPTER) + @moon.auto_migrate!(ADAPTER) + + repository(ADAPTER) do + @moon.create(:name => "Charon") + @moon.create(:name => "Phobos") + end + end + + describe '.new' do + it 'should be able to persist' do + repository(ADAPTER) do + pluto = @planet.new + pluto.name = 'Pluto' + pluto.distance = 1_000_000 + pluto.save + + clone = @planet.get!('Pluto') + clone.name.should == 'Pluto' + clone.distance.should == 1_000_000 + end + end + end + + describe ".get" do + include LoggingHelper + + it "should typecast key" do + resource = nil + lambda { + repository(ADAPTER) do + resource = @moon.get("1") + end + }.should_not raise_error + resource.should be_kind_of(DataMapper::Resource) + end + + it "should use the identity map within a repository block" do + logger do |log| + repository(ADAPTER) do + @moon.get("1") + @moon.get(1) + end + 
log.readlines.size.should == 1 + end + end + + it "should not use the identity map outside a repository block" do + logger do |log| + @moon.get(1) + @moon.get(1) + log.readlines.size.should == 2 + end + end + end + + describe ".base_model" do + describe "(when called on base model)" do + it "should refer to itself" do + ModelSpec::STI.base_model.should == ModelSpec::STI + end + end + describe "(when called on descendant model)" do + it "should refer to the base model" do + ModelSpec::STIDescendant.base_model.should == ModelSpec::STI.base_model + end + end + end + + it 'should provide #load' do + ModelSpec::STI.should respond_to(:load) + end + + describe '#load' do + it 'should load resources with nil discriminator fields' do + resource = ModelSpec::STI.create(:name => 'resource') + query = ModelSpec::STI.all.query + fields = query.fields + + fields.should == ModelSpec::STI.properties(ADAPTER).slice(:id, :name, :type) + + # would blow up prior to fix + lambda { + ModelSpec::STI.load([ resource.id, resource.name, nil ], query) + }.should_not raise_error(NoMethodError) + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/mysql_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/integration/mysql_adapter_spec.rb new file mode 100644 index 0000000..6b9ba78 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/mysql_adapter_spec.rb @@ -0,0 +1,85 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if HAS_MYSQL + describe DataMapper::Adapters::MysqlAdapter do + before :all do + @adapter = repository(:mysql).adapter + end + + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + property :object, Object + + auto_migrate!(:mysql) + end + end + + it "should handle Object type" do + time = Time.now + repository(:mysql) do + Sputnik.create(:name => "Sputnik", :object => time) + Sputnik.first.object.should == time + end + end + + describe "auto migrating" do + it 
"#upgrade_model should work" do + @adapter.destroy_model_storage(repository(:mysql), Sputnik) + @adapter.storage_exists?("sputniks").should == false + Sputnik.auto_migrate!(:mysql) + @adapter.storage_exists?("sputniks").should == true + @adapter.field_exists?("sputniks", "new_prop").should == false + Sputnik.property :new_prop, Integer + Sputnik.auto_upgrade!(:mysql) + @adapter.field_exists?("sputniks", "new_prop").should == true + end + end + + describe "querying metadata" do + it "#storage_exists? should return true for tables that exist" do + @adapter.storage_exists?("sputniks").should == true + end + + it "#storage_exists? should return false for tables that don't exist" do + @adapter.storage_exists?("space turds").should == false + end + + it "#field_exists? should return true for columns that exist" do + @adapter.field_exists?("sputniks", "name").should == true + end + + it "#storage_exists? should return false for tables that don't exist" do + @adapter.field_exists?("sputniks", "plur").should == false + end + end + + describe "handling transactions" do + before do + @transaction = DataMapper::Transaction.new(@adapter) + end + + it "should rollback changes when #rollback_transaction is called" do + repository(:mysql) do + @transaction.commit do |trans| + Sputnik.create(:name => 'my pretty sputnik') + trans.rollback + end + Sputnik.all(:name => 'my pretty sputnik').should be_empty + end + end + + it "should commit changes when #commit_transaction is called" do + repository(:mysql) do + @transaction.commit do + Sputnik.create(:name => 'my pretty sputnik') + end + Sputnik.all(:name => 'my pretty sputnik').size.should == 1 + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/postgres_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/integration/postgres_adapter_spec.rb new file mode 100644 index 0000000..c8bf68c --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/postgres_adapter_spec.rb @@ -0,0 +1,731 @@ +require 
File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if HAS_POSTGRES + describe DataMapper::Adapters::PostgresAdapter do + before :all do + @adapter = repository(:postgres).adapter + end + + describe "auto migrating" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + end + + it "#upgrade_model should work" do + @adapter.destroy_model_storage(repository(:postgres), Sputnik) + @adapter.storage_exists?("sputniks").should be_false + Sputnik.auto_migrate!(:postgres) + @adapter.storage_exists?("sputniks").should be_true + @adapter.field_exists?("sputniks", "new_prop").should be_false + Sputnik.property :new_prop, DM::Serial + @adapter.send(:drop_sequence, repository(:postgres), Sputnik.new_prop) + Sputnik.auto_upgrade!(:postgres) + @adapter.field_exists?("sputniks", "new_prop").should == true + end + end + + describe '#312' do + it "should behave sanely for time fields" do + + class Thing + include DataMapper::Resource + property :id, Integer, :serial => true + property :created_at, Time + end + + Thing.auto_migrate!(:postgres) + + repository(:postgres) do + time_now = Time.now + + t = Thing.new + t.created_at = time_now + + t.save + + t1 = Thing.first + t1.created_at.should == time_now + end + + end + end + + describe "querying metadata" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + + Sputnik.auto_migrate!(:postgres) + end + + it "#storage_exists? should return true for tables that exist" do + @adapter.storage_exists?("sputniks").should == true + end + + it "#storage_exists? should return false for tables that don't exist" do + @adapter.storage_exists?("space turds").should == false + end + + it "#field_exists? should return true for columns that exist" do + @adapter.field_exists?("sputniks", "name").should == true + end + + it "#field_exists? 
should return false for columns that don't exist" do + @adapter.field_exists?("sputniks", "plur").should == false + end + end + + describe "handling transactions" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + + Sputnik.auto_migrate!(:postgres) + end + + before do + @transaction = DataMapper::Transaction.new(@adapter) + end + + it "should rollback changes when #rollback_transaction is called" do + @transaction.commit do |trans| + @adapter.execute("INSERT INTO sputniks (name) VALUES ('my pretty sputnik')") + trans.rollback + end + @adapter.query("SELECT * FROM sputniks WHERE name = 'my pretty sputnik'").empty?.should == true + end + + it "should commit changes when #commit_transaction is called" do + @transaction.commit do + @adapter.execute("INSERT INTO sputniks (name) VALUES ('my pretty sputnik')") + end + @adapter.query("SELECT * FROM sputniks WHERE name = 'my pretty sputnik'").size.should == 1 + end + end + + describe "reading & writing a database" do + before :all do + class User + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + + class Voyager + include DataMapper::Resource + storage_names[:postgres] = 'voyagers' + + property :id, Serial + property :age, Integer + end + + # Voyager.auto_migrate!(:postgres) + end + + before do + User.auto_migrate!(:postgres) + + @adapter.execute("INSERT INTO users (name) VALUES ('Paul')") + end + + it "should be able to specify a schema name as part of the storage name" do + pending "This works, but no create-schema support in PostgresAdapter to easily test with" + lambda do + repository(:postgres) do + Voyager.create(:age => 1_000) + end + end.should_not raise_error + end + + it 'should be able to #execute an arbitrary query' do + result = @adapter.execute("INSERT INTO users (name) VALUES ('Sam')") + + result.affected_rows.should == 1 + end + + it 'should be able to #query' do + result = 
@adapter.query("SELECT * FROM users") + + result.should be_kind_of(Array) + row = result.first + row.should be_kind_of(Struct) + row.members.should == %w{id name} + + row.id.should == 1 + row.name.should == 'Paul' + end + + it 'should return an empty array if #query found no rows' do + @adapter.execute("DELETE FROM users") + + result = nil + lambda { result = @adapter.query("SELECT * FROM users") }.should_not raise_error + + result.should be_kind_of(Array) + result.size.should == 0 + end + end + + describe "CRUD for serial Key" do + before :all do + class VideoGame + include DataMapper::Resource + + property :id, Serial + property :name, String + property :object, Object + end + end + + before do + VideoGame.auto_migrate!(:postgres) + end + + it 'should be able to create a record' do + time = Time.now + game = VideoGame.new(:name => 'System Shock', :object => time) + repository(:postgres) do + game.save + game.should_not be_a_new_record + game.should_not be_dirty + + saved = VideoGame.first(:name => game.name) + saved.id.should == game.id + saved.object.should == time + end + end + + it 'should be able to read a record' do + name = 'Wing Commander: Privateer' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id + + game = repository(:postgres) do + VideoGame.get(id) + end + + game.name.should == name + game.should_not be_dirty + game.should_not be_a_new_record + end + + it 'should be able to update a record' do + name = 'Resistance: Fall of Mon' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) 
RETURNING id', name).insert_id + + game = repository(:postgres) do + VideoGame.get(id) + end + + game.should_not be_a_new_record + + game.should_not be_dirty + game.name = game.name.sub(/Mon/, 'Man') + game.should be_dirty + + repository(:postgres) do + game.save + end + + game.should_not be_dirty + + clone = repository(:postgres) do + VideoGame.get(id) + end + + clone.name.should == game.name + end + + it 'should be able to delete a record' do + name = 'Zelda' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id + + game = repository(:postgres) do + VideoGame.get(id) + end + + game.name.should == name + + repository(:postgres) do + game.destroy.should be_true + end + + game.should be_a_new_record + game.should be_dirty + end + + it 'should respond to Resource#get' do + name = 'Contra' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?) RETURNING id', name).insert_id + + contra = repository(:postgres) { VideoGame.get(id) } + + contra.should_not be_nil + contra.should_not be_dirty + contra.should_not be_a_new_record + contra.id.should == id + end + end + + describe "CRUD for Composite Key" do + before :all do + class BankCustomer + include DataMapper::Resource + + property :bank, String, :key => true + property :account_number, String, :key => true + property :name, String + end + end + + before do + BankCustomer.auto_migrate!(:postgres) + end + + it 'should be able to create a record' do + customer = BankCustomer.new(:bank => 'Community Bank', :account_number => '123456', :name => 'David Hasselhoff') + repository(:postgres) do + customer.save + end + + customer.should_not be_a_new_record + customer.should_not be_dirty + + row = @adapter.query('SELECT "bank", "account_number" FROM "bank_customers" WHERE "name" = ?', customer.name).first + row.bank.should == customer.bank + row.account_number.should == customer.account_number + end + + it 'should be able to read a record' do + bank, account_number, 
name = 'Chase', '4321', 'Super Wonderful' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + repository(:postgres) do + BankCustomer.get(bank, account_number).name.should == name + end + end + + it 'should be able to update a record' do + bank, account_number, name = 'Wells Fargo', '00101001', 'Spider Pig' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + customer = repository(:postgres) do + BankCustomer.get(bank, account_number) + end + + customer.name = 'Bat-Pig' + + customer.should_not be_a_new_record + customer.should be_dirty + + customer.save + + customer.should_not be_dirty + + clone = repository(:postgres) do + BankCustomer.get(bank, account_number) + end + + clone.name.should == customer.name + end + + it 'should be able to delete a record' do + bank, account_number, name = 'Megacorp', 'ABC', 'Flash Gordon' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + customer = repository(:postgres) do + BankCustomer.get(bank, account_number) + end + + customer.name.should == name + + repository(:postgres) do + customer.destroy.should be_true + end + + customer.should be_a_new_record + customer.should be_dirty + end + + it 'should respond to Resource#get' do + bank, account_number, name = 'Conchords', '1100101', 'Robo Boogie' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + robots = repository(:postgres) { BankCustomer.get(bank, account_number) } + + robots.should_not be_nil + robots.should_not be_dirty + robots.should_not be_a_new_record + robots.bank.should == bank + robots.account_number.should == account_number + end + end + + describe "Ordering a Query" do + before :all do + class SailBoat + include DataMapper::Resource + 
property :id, Serial + property :name, String + property :port, String + end + end + + before do + SailBoat.auto_migrate!(:postgres) + + repository(:postgres) do + SailBoat.create(:id => 1, :name => "A", :port => "C") + SailBoat.create(:id => 2, :name => "B", :port => "B") + SailBoat.create(:id => 3, :name => "C", :port => "A") + end + end + + it "should order results" do + repository(:postgres) do + result = SailBoat.all(:order => [ + DataMapper::Query::Direction.new(SailBoat.properties[:name], :asc) + ]) + result[0].id.should == 1 + + result = SailBoat.all(:order => [ + DataMapper::Query::Direction.new(SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 3 + + result = SailBoat.all(:order => [ + DataMapper::Query::Direction.new(SailBoat.properties[:name], :asc), + DataMapper::Query::Direction.new(SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 1 + + result = SailBoat.all(:order => [ + SailBoat.properties[:name], + DataMapper::Query::Direction.new(SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 1 + end + end + end + + describe "Lazy Loaded Properties" do + before :all do + class SailBoat + include DataMapper::Resource + property :id, Serial + property :notes, String, :lazy => [:notes] + property :trip_report, String, :lazy => [:notes,:trip] + property :miles, Integer, :lazy => [:trip] + end + end + + before do + SailBoat.auto_migrate!(:postgres) + + repository(:postgres) do + SailBoat.create(:id => 1, :notes=>'Note',:trip_report=>'Report',:miles=>23) + SailBoat.create(:id => 2, :notes=>'Note',:trip_report=>'Report',:miles=>23) + SailBoat.create(:id => 3, :notes=>'Note',:trip_report=>'Report',:miles=>23) + end + end + + it "should lazy load" do + result = repository(:postgres) { SailBoat.all.to_a } + + result[0].attribute_loaded?(:notes).should be_false + result[0].attribute_loaded?(:trip_report).should be_false + result[1].attribute_loaded?(:notes).should be_false + + result[1].notes.should_not be_nil + + 
result[1].attribute_loaded?(:notes).should be_true + result[1].attribute_loaded?(:trip_report).should be_true + result[1].attribute_loaded?(:miles).should be_false + + result = repository(:postgres) { SailBoat.all.to_a } + + result[0].attribute_loaded?(:trip_report).should be_false + result[0].attribute_loaded?(:miles).should be_false + + result[1].trip_report.should_not be_nil + result[2].attribute_loaded?(:miles).should be_true + end + end + + describe "finders" do + before :all do + class SerialFinderSpec + include DataMapper::Resource + + property :id, Serial + property :sample, String + end + end + + before do + SerialFinderSpec.auto_migrate!(:postgres) + + repository(:postgres) do + 100.times do + SerialFinderSpec.create(:sample => rand.to_s) + end + end + end + + it "should return all available rows" do + repository(:postgres) do + SerialFinderSpec.all.should have(100).entries + end + end + + it "should allow limit and offset" do + repository(:postgres) do + SerialFinderSpec.all(:limit => 50).should have(50).entries + + SerialFinderSpec.all(:limit => 20, :offset => 40).map { |entry| entry.id }.should == SerialFinderSpec.all[40...60].map { |entry| entry.id } + end + end + + it "should lazy-load missing attributes" do + sfs = repository(:postgres) do + SerialFinderSpec.first(:fields => [ :id ]) + end + + sfs.should be_a_kind_of(SerialFinderSpec) + sfs.should_not be_a_new_record + + sfs.attribute_loaded?(:sample).should be_false + sfs.sample + sfs.attribute_loaded?(:sample).should be_true + end + + it "should translate an Array to an IN clause" do + ids = repository(:postgres) do + SerialFinderSpec.all(:limit => 10).map { |entry| entry.id } + end + + results = repository(:postgres) do + SerialFinderSpec.all(:id => ids) + end + + results.size.should == 10 + results.map { |entry| entry.id }.should == ids + end + end + + describe "belongs_to associations" do + before :all do + class Engine + include DataMapper::Resource + def self.default_repository_name; 
:postgres end + + property :id, Serial + property :name, String + end + + class Yard + include DataMapper::Resource + def self.default_repository_name; :postgres end + + property :id, Serial + property :name, String + property :engine_id, Integer + + belongs_to :engine + end + end + + before do + Engine.auto_migrate!(:postgres) + + @adapter.execute('INSERT INTO "engines" ("id", "name") values (?, ?)', 1, 'engine1') + @adapter.execute('INSERT INTO "engines" ("id", "name") values (?, ?)', 2, 'engine2') + + Yard.auto_migrate!(:postgres) + + @adapter.execute('INSERT INTO "yards" ("id", "name", "engine_id") values (?, ?, ?)', 1, 'yard1', 1) + end + + it "should load without the parent" + + it 'should allow substituting the parent' do + repository(:postgres) do + y = Yard.first(:id => 1) + e = Engine.first(:id => 2) + y.engine = e + y.save + end + + repository(:postgres) do + Yard.first(:id => 1).engine_id.should == 2 + end + end + + it "#belongs_to" do + yard = Yard.new + yard.should respond_to(:engine) + yard.should respond_to(:engine=) + end + + it "should load the associated instance" do + y = repository(:postgres) do + Yard.first(:id => 1) + end + y.engine.should_not be_nil + y.engine.id.should == 1 + y.engine.name.should == "engine1" + end + + it 'should save the association key in the child' do + repository(:postgres) do + e = Engine.first(:id => 2) + Yard.create(:id => 2, :name => 'yard2', :engine => e) + end + + repository(:postgres) do + Yard.first(:id => 2).engine_id.should == 2 + end + end + + it 'should save the parent upon saving of child' do + repository(:postgres) do + e = Engine.new(:id => 10, :name => "engine10") + y = Yard.new(:id => 10, :name => "Yard10", :engine => e) + y.save + + y.engine_id.should == 10 + end + + repository(:postgres) do + Engine.first(:id => 10).should_not be_nil + end + end + end + + describe "has n associations" do + before :all do + class Host + include DataMapper::Resource + def self.default_repository_name; :postgres end + + 
property :id, Serial + property :name, String + + has n, :slices + end + + class Slice + include DataMapper::Resource + def self.default_repository_name; :postgres end + + property :id, Serial + property :name, String + property :host_id, Integer + + belongs_to :host + end + end + + before do + Host.auto_migrate!(:postgres) + Slice.auto_migrate!(:postgres) + + @adapter.execute('INSERT INTO "hosts" ("id", "name") values (?, ?)', 1, 'host1') + @adapter.execute('INSERT INTO "hosts" ("id", "name") values (?, ?)', 2, 'host2') + + @adapter.execute('INSERT INTO "slices" ("id", "name", "host_id") values (?, ?, ?)', 1, 'slice1', 1) + @adapter.execute('INSERT INTO "slices" ("id", "name", "host_id") values (?, ?, ?)', 2, 'slice2', 1) + end + + it "#has n" do + h = Host.new + h.should respond_to(:slices) + end + + it "should allow removal of a child through a loaded association" do + h = repository(:postgres) do + Host.first(:id => 1) + end + + s = h.slices.first + + h.slices.delete(s) + h.slices.size.should == 1 + h.save + + s = repository(:postgres) do + Slice.first(:id => s.id) + end + + s.host.should be_nil + s.host_id.should be_nil + end + + it "should load the associated instances" do + h = repository(:postgres) do + Host.first(:id => 1) + end + + h.slices.should_not be_nil + h.slices.size.should == 2 + h.slices.first.id.should == 1 + h.slices.last.id.should == 2 + end + + it "should add and save the associated instance" do + repository(:postgres) do + h = Host.first(:id => 1) + + h.slices << Slice.new(:id => 3, :name => 'slice3') + h.save + + s = repository(:postgres) do + Slice.first(:id => 3) + end + + s.host.id.should == 1 + end + end + + it "should not save the associated instance if the parent is not saved" do + repository(:postgres) do + h = Host.new(:id => 10, :name => "host10") + h.slices << Slice.new(:id => 10, :name => 'slice10') + end + + repository(:postgres) do + Slice.first(:id => 10).should be_nil + end + end + + it "should save the associated instance 
upon saving of parent" do + repository(:postgres) do + h = Host.new(:id => 10, :name => "host10") + h.slices << Slice.new(:id => 10, :name => 'slice10') + h.save + end + + s = repository(:postgres) do + Slice.first(:id => 10) + end + + s.should_not be_nil + s.host.should_not be_nil + s.host.id.should == 10 + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/property_spec.rb b/vendor/dm-core-0.9.6/spec/integration/property_spec.rb new file mode 100644 index 0000000..112eb56 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/property_spec.rb @@ -0,0 +1,233 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +gem 'fastercsv', '>=1.2.3' +require 'fastercsv' + +if ADAPTER + describe DataMapper::Property, "with #{ADAPTER}" do + describe " tracking strategies" do + before :all do + class Actor + include DataMapper::Resource + + property :id, Serial + property :name, String, :track => :set # :track default is :get for mutable types + property :notes, DataMapper::Types::Text + property :age, Integer # :track default is :set for immutable types + property :rating, Integer + property :location, String + property :lead, TrueClass, :track => :load + property :cv, Object # :track should be :hash + property :agent, String, :track => :hash # :track only Object#hash value on :load. + # Potentially faster, but less safe, so use judiciously, when the odds of a hash-collision are low. 
+ end + end + + before do + Actor.auto_migrate!(ADAPTER) + end + + it "should set up tracking information" do + Actor.properties[:name].track.should == :set + Actor.properties[:location].track.should == :get + Actor.properties[:rating].track.should == :set + Actor.properties[:lead].track.should == :load + Actor.properties[:cv].track.should == :hash + Actor.properties[:agent].track.should == :hash + end + + it "should track on :set" do + repository(ADAPTER) do + bob = Actor.new(:name => 'bob') + bob.save + + bob.original_values.should_not have_key(:name) + bob.dirty?.should == false + + bob.name = "Bob" + bob.original_values.should have_key(:name) + bob.original_values[:name].should == 'bob' + bob.dirty?.should == true + end + end + + it "should track on :get" do + repository(ADAPTER) do + jon = Actor.new(:name => 'jon', :location => 'dallas') + jon.save + + jon.location + jon.original_values.should have_key(:location) + jon.original_values[:location].should == 'dallas' + + jon.dirty?.should be_false + jon.save.should be_true + + jon.location.upcase! + jon.location.should == 'DALLAS' + jon.original_values[:location].should == 'dallas' + + jon.dirty?.should be_true + jon.save.should be_true + + jon.location << '!' 
+ jon.original_values[:location].should == 'DALLAS' + jon.dirty?.should be_true + end + end + + it "should track on :load" do + repository(ADAPTER) do + jan = Actor.create(:name => 'jan', :lead => true) + jan.lead = false + jan.original_values[:lead].should be_true + jan.dirty?.should == true + end + repository(ADAPTER) do + jan = Actor.first + jan.original_values.should have_key(:lead) + jan.original_values[:lead].should be_true + jan.dirty?.should == false + end + end + + it "should track on :hash" do + cv = { 2005 => "Othello" } + repository(ADAPTER) do + tom = Actor.create(:name => 'tom', :cv => cv) + end + repository(ADAPTER) do + tom = Actor.first(:name => 'tom') + tom.cv.merge!({2006 => "Macbeth"}) + + tom.original_values.should have_key(:cv) + # tom.original_values[:cv].should == cv.hash + tom.cv.should == { 2005 => "Othello", 2006 => "Macbeth" } + tom.dirty?.should == true + end + end + + it "should track with lazy text fields (#342)" do + repository(ADAPTER) do + tim = Actor.create(:name => 'tim') + end + repository(ADAPTER) do + tim = Actor.first(:name => 'tim') + tim.notes # make sure they're loaded... 
+ tim.dirty?.should be_false + tim.save.should be_true + tim.notes = "Testing" + tim.dirty?.should be_true + tim.save.should be_true + end + repository(ADAPTER) do + tim = Actor.first(:name => 'tim') + tim.notes.should == "Testing" + end + end + end + + describe "lazy loading" do + before :all do + class RowBoat + include DataMapper::Resource + property :id, Serial + property :notes, String, :lazy => [:notes] + property :trip_report, String, :lazy => [:notes,:trip] + property :miles, Integer, :lazy => [:trip] + end + end + + before do + RowBoat.auto_migrate!(ADAPTER) + + repository(ADAPTER) do + RowBoat.create(:id => 1, :notes=>'Note',:trip_report=>'Report',:miles=>23) + RowBoat.create(:id => 2, :notes=>'Note',:trip_report=>'Report',:miles=>23) + RowBoat.create(:id => 3, :notes=>'Note',:trip_report=>'Report',:miles=>23) + end + end + + it "should lazy load in context" do + result = repository(ADAPTER) { RowBoat.all.to_a } + + result[0].attribute_loaded?(:notes).should be_false + result[0].attribute_loaded?(:trip_report).should be_false + result[1].attribute_loaded?(:notes).should be_false + + result[0].notes.should_not be_nil + + result[1].attribute_loaded?(:notes).should be_true + result[1].attribute_loaded?(:trip_report).should be_true + result[1].attribute_loaded?(:miles).should be_false + + result = repository(ADAPTER) { RowBoat.all.to_a } + + result[0].attribute_loaded?(:trip_report).should be_false + result[0].attribute_loaded?(:miles).should be_false + + result[1].trip_report.should_not be_nil + result[2].attribute_loaded?(:miles).should be_true + end + + it "should lazy load on Property#set" do + repository(ADAPTER) do + boat = RowBoat.first + boat.attribute_loaded?(:notes).should be_false + boat.notes = 'New Note' + boat.original_values[:notes].should == "Note" + end + end + end + + describe 'defaults' do + before :all do + class Catamaran + include DataMapper::Resource + property :id, Serial + property :name, String + + # Boolean + property 
:could_be_bool0, TrueClass, :default => true + property :could_be_bool1, TrueClass, :default => false + end + + repository(ADAPTER){ Catamaran.auto_migrate!(ADAPTER) } + end + + before :each do + @cat = Catamaran.new + end + + it "should have defaults" do + @cat.could_be_bool0.should == true + @cat.could_be_bool1.should_not be_nil + @cat.could_be_bool1.should == false + + @cat.name = 'Mary Mayweather' + + repository(ADAPTER) do + @cat.save + + cat = Catamaran.first + cat.could_be_bool0.should == true + cat.could_be_bool1.should_not be_nil + cat.could_be_bool1.should == false + cat.destroy + end + + end + + it "should have defaults even with creates" do + repository(ADAPTER) do + Catamaran.create(:name => 'Jingle All The Way') + cat = Catamaran.first + cat.name.should == 'Jingle All The Way' + cat.could_be_bool0.should == true + cat.could_be_bool1.should_not be_nil + cat.could_be_bool1.should == false + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/query_spec.rb b/vendor/dm-core-0.9.6/spec/integration/query_spec.rb new file mode 100644 index 0000000..57fd626 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/query_spec.rb @@ -0,0 +1,506 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if ADAPTER + module QuerySpec + class SailBoat + include DataMapper::Resource + + property :id, Serial + property :name, String + property :port, String + property :captain, String + + def self.default_repository_name + ADAPTER + end + end + + class Permission + include DataMapper::Resource + + property :id, Serial + property :user_id, Integer + property :resource_id, Integer + property :resource_type, String + property :token, String + + def self.default_repository_name + ADAPTER + end + end + + class Region + include DataMapper::Resource + + property :id, Serial + property :name, String + property :type, String + + def self.default_repository_name + ADAPTER + end + end + + class Factory + include 
DataMapper::Resource + + property :id, Serial + property :region_id, Integer + property :name, String + + repository(:mock) do + property :land, String + end + + belongs_to :region + + def self.default_repository_name + ADAPTER + end + end + + class Vehicle + include DataMapper::Resource + + property :id, Serial + property :factory_id, Integer + property :name, String + + belongs_to :factory + + def self.default_repository_name + ADAPTER + end + end + + class Group + include DataMapper::Resource + property :id, Serial + property :name, String + end + end + + module Namespace + class Region + include DataMapper::Resource + + property :id, Serial + property :name, String + + def self.default_repository_name + ADAPTER + end + end + + class Factory + include DataMapper::Resource + + property :id, Serial + property :region_id, Integer + property :name, String + + repository(:mock) do + property :land, String + end + + belongs_to :region + + def self.default_repository_name + ADAPTER + end + end + + class Vehicle + include DataMapper::Resource + property :id, Serial + property :factory_id, Integer + property :name, String + + belongs_to :factory + + def self.default_repository_name + ADAPTER + end + end + end + + describe DataMapper::Query, "with #{ADAPTER}" do + describe '#unique' do + include LoggingHelper + + before(:each) do + QuerySpec::SailBoat.auto_migrate! 
+ + QuerySpec::SailBoat.create(:name => 'A', :port => 'C') + QuerySpec::SailBoat.create(:name => 'B', :port => 'B') + QuerySpec::SailBoat.create(:name => 'C', :port => 'A') + end + + def parse_statement(log) + log.readlines.join.chomp.split(' ~ ').last + end + + describe 'when true' do + if [ :postgres, :sqlite3, :mysql ].include?(ADAPTER) + it 'should add a GROUP BY to the SQL query' do + logger do |log| + QuerySpec::SailBoat.all(:unique => true, :fields => [ :id ]).to_a + + case ADAPTER + when :postgres, :sqlite3 + parse_statement(log).should == 'SELECT "id" FROM "query_spec_sail_boats" GROUP BY "id" ORDER BY "id"' + when :mysql + parse_statement(log).should == 'SELECT `id` FROM `query_spec_sail_boats` GROUP BY `id` ORDER BY `id`' + end + end + end + + it 'should not add a GROUP BY to the SQL query if no field is a Property' do + operator = DataMapper::Query::Operator.new(:thing, :test) + + # make the operator act like a Property + class << operator + property = QuerySpec::SailBoat.properties[:id] + (property.methods - (public_instance_methods - %w[ type ])).each do |method| + define_method(method) do |*args| + property.send(method, *args) + end + end + end + + operator.should_not be_kind_of(DataMapper::Property) + + logger do |log| + QuerySpec::SailBoat.all(:unique => true, :fields => [ operator ]).to_a + + case ADAPTER + when :postgres, :sqlite3 + parse_statement(log).should == 'SELECT "id" FROM "query_spec_sail_boats" ORDER BY "id"' + when :mysql + parse_statement(log).should == 'SELECT `id` FROM `query_spec_sail_boats` ORDER BY `id`' + end + end + end + end + end + + describe 'when false' do + if [ :postgres, :sqlite3, :mysql ].include?(ADAPTER) + it 'should not add a GROUP BY to the SQL query' do + logger do |log| + QuerySpec::SailBoat.all(:unique => false, :fields => [ :id ]).to_a + + case ADAPTER + when :postgres, :sqlite3 + parse_statement(log).should == 'SELECT "id" FROM "query_spec_sail_boats" ORDER BY "id"' + when :mysql + parse_statement(log).should 
== 'SELECT `id` FROM `query_spec_sail_boats` ORDER BY `id`' + end + end + end + end + end + end + + describe 'when ordering' do + before(:each) do + QuerySpec::SailBoat.auto_migrate! + + QuerySpec::SailBoat.create(:name => 'A', :port => 'C') + QuerySpec::SailBoat.create(:name => 'B', :port => 'B') + QuerySpec::SailBoat.create(:name => 'C', :port => 'A') + end + + it "should find by conditions" do + lambda do + repository(ADAPTER) do + QuerySpec::SailBoat.first(:conditions => [ 'name = ?', 'B' ]) + end + end.should_not raise_error + + lambda do + repository(ADAPTER) do + QuerySpec::SailBoat.first(:conditions => [ 'name = ?', 'A' ]) + end + end.should_not raise_error + end + + it "should find by conditions passed in as hash" do + repository(ADAPTER) do + QuerySpec::SailBoat.create(:name => "couldbe@email.com", :port => 'wee') + + find = QuerySpec::SailBoat.first(:name => 'couldbe@email.com') + find.name.should == 'couldbe@email.com' + + find = QuerySpec::SailBoat.first(:name => 'couldbe@email.com', :port.not => nil) + find.should_not be_nil + find.port.should_not be_nil + find.name.should == 'couldbe@email.com' + end + end + + it "should find by conditions passed in a range" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id => 0..2) + find.should_not be_nil + find.should have(2).entries + + find = QuerySpec::SailBoat.all(:id.not => 0..2) + find.should have(1).entries + end + end + + it "should find by conditions passed in as an array" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id => [1,2]) + find.should_not be_nil + find.should have(2).entries + + find = QuerySpec::SailBoat.all(:id.not => [1,2]) + find.should have(1).entries + end + end + + describe "conditions passed in as an empty array" do + it "should work when id is an empty Array" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id => []) + find.should have(0).entries + end + end + + it "should work when id is NOT an empty Array" do + repository(ADAPTER) do + find 
= QuerySpec::SailBoat.all(:id.not => []) + find.should have(3).entries + end + end + + it "should work when id is an empty Array and other conditions are specified" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id => [], :name => "A") + find.should have(0).entries + end + end + + it "should work when id is NOT an empty Array and other conditions are specified" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id.not => [], :name => "A") + find.should have(1).entries + end + end + + it "should work when id is NOT an empty Array and other Array conditions are specified" do + repository(ADAPTER) do + find = QuerySpec::SailBoat.all(:id.not => [], :name => ["A", "B"]) + find.should have(2).entries + end + end + end + + it "should order results" do + repository(ADAPTER) do + result = QuerySpec::SailBoat.all(:order => [ + DataMapper::Query::Direction.new(QuerySpec::SailBoat.properties[:name], :asc) + ]) + result[0].id.should == 1 + + result = QuerySpec::SailBoat.all(:order => [ + DataMapper::Query::Direction.new(QuerySpec::SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 3 + + result = QuerySpec::SailBoat.all(:order => [ + DataMapper::Query::Direction.new(QuerySpec::SailBoat.properties[:name], :asc), + DataMapper::Query::Direction.new(QuerySpec::SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 1 + + result = QuerySpec::SailBoat.all(:order => [ + QuerySpec::SailBoat.properties[:name], + DataMapper::Query::Direction.new(QuerySpec::SailBoat.properties[:port], :asc) + ]) + result[0].id.should == 1 + + result = QuerySpec::SailBoat.all(:order => [ :name ]) + result[0].id.should == 1 + + result = QuerySpec::SailBoat.all(:order => [ :name.desc ]) + result[0].id.should == 3 + end + end + end + + describe 'when sub-selecting' do + before(:each) do + [ QuerySpec::SailBoat, QuerySpec::Permission ].each { |m| m.auto_migrate! 
} + + QuerySpec::SailBoat.create(:id => 1, :name => "Fantasy I", :port => "Cape Town", :captain => 'Joe') + QuerySpec::SailBoat.create(:id => 2, :name => "Royal Flush II", :port => "Cape Town", :captain => 'James') + QuerySpec::SailBoat.create(:id => 3, :name => "Infringer III", :port => "Cape Town", :captain => 'Jason') + + #User 1 permission -- read boat 1 & 2 + QuerySpec::Permission.create(:id => 1, :user_id => 1, :resource_id => 1, :resource_type => 'SailBoat', :token => 'READ') + QuerySpec::Permission.create(:id => 2, :user_id => 1, :resource_id => 2, :resource_type => 'SailBoat', :token => 'READ') + + #User 2 permission -- read boat 2 & 3 + QuerySpec::Permission.create(:id => 3, :user_id => 2, :resource_id => 2, :resource_type => 'SailBoat', :token => 'READ') + QuerySpec::Permission.create(:id => 4, :user_id => 2, :resource_id => 3, :resource_type => 'SailBoat', :token => 'READ') + end + + it 'should accept a DM::Query as a value of a condition' do + # User 1 + acl = DataMapper::Query.new(repository(ADAPTER), QuerySpec::Permission, :user_id => 1, :resource_type => 'SailBoat', :token => 'READ', :fields => [ :resource_id ]) + query = { :port => 'Cape Town', :id => acl, :captain.like => 'J%', :order => [ :id ] } + boats = repository(ADAPTER) { QuerySpec::SailBoat.all(query) } + boats.should have(2).entries + boats.entries[0].id.should == 1 + boats.entries[1].id.should == 2 + + # User 2 + acl = DataMapper::Query.new(repository(ADAPTER), QuerySpec::Permission, :user_id => 2, :resource_type => 'SailBoat', :token => 'READ', :fields => [ :resource_id ]) + query = { :port => 'Cape Town', :id => acl, :captain.like => 'J%', :order => [ :id ] } + boats = repository(ADAPTER) { QuerySpec::SailBoat.all(query) } + + boats.should have(2).entries + boats.entries[0].id.should == 2 + boats.entries[1].id.should == 3 + end + + it 'when value is NOT IN another query' do + # Boats that User 1 Cannot see + acl = DataMapper::Query.new(repository(ADAPTER), QuerySpec::Permission, 
:user_id => 1, :resource_type => 'SailBoat', :token => 'READ', :fields => [ :resource_id ]) + query = { :port => 'Cape Town', :id.not => acl, :captain.like => 'J%' } + boats = repository(ADAPTER) { QuerySpec::SailBoat.all(query) } + boats.should have(1).entries + boats.entries[0].id.should == 3 + end + end # describe sub-selecting + + describe 'when linking associated objects' do + before(:each) do + [ QuerySpec::Region, QuerySpec::Factory, QuerySpec::Vehicle ].each { |m| m.auto_migrate! } + + QuerySpec::Region.create(:id => 1, :name => 'North West', :type => 'commercial') + QuerySpec::Factory.create(:id => 2000, :region_id => 1, :name => 'North West Plant') + QuerySpec::Vehicle.create(:id => 1, :factory_id => 2000, :name => '10 ton delivery truck') + + Namespace::Region.auto_migrate! + Namespace::Factory.auto_migrate! + Namespace::Vehicle.auto_migrate! + + Namespace::Region.create(:id => 1, :name => 'North West') + Namespace::Factory.create(:id => 2000, :region_id => 1, :name => 'North West Plant') + Namespace::Vehicle.create(:id => 1, :factory_id => 2000, :name => '10 ton delivery truck') + end + + it 'should require that all properties in :fields and all :links come from the same repository' #do + # land = QuerySpec::Factory.properties(:mock)[:land] + # fields = [] + # QuerySpec::Vehicle.properties(ADAPTER).map do |property| + # fields << property + # end + # fields << land + # + # lambda{ + # begin + # results = repository(ADAPTER) { QuerySpec::Vehicle.all(:links => [ :factory ], :fields => fields) } + # rescue RuntimeError + # $!.message.should == "Property QuerySpec::Factory.land not available in repository #{ADAPTER}" + # raise $! 
+ # end + # }.should raise_error(RuntimeError) + #end + + it 'should accept a DM::Assoc::Relationship as a link' do + factory = DataMapper::Associations::Relationship.new( + :factory, + ADAPTER, + 'QuerySpec::Vehicle', + 'QuerySpec::Factory', + { :child_key => [ :factory_id ], :parent_key => [ :id ] } + ) + results = repository(ADAPTER) { QuerySpec::Vehicle.all(:links => [ factory ]) } + results.should have(1).entries + end + + it 'should accept a symbol of an association name as a link' do + results = repository(ADAPTER) { QuerySpec::Vehicle.all(:links => [ :factory ]) } + results.should have(1).entries + end + + it 'should accept a string of an association name as a link' do + results = repository(ADAPTER) { QuerySpec::Vehicle.all(:links => [ 'factory' ]) } + results.should have(1).entries + end + + it 'should accept a mixture of items as a set of links' do + region = DataMapper::Associations::Relationship.new( + :region, + ADAPTER, + 'QuerySpec::Factory', + 'QuerySpec::Region', + { :child_key => [ :region_id ], :parent_key => [ :id ] } + ) + results = repository(ADAPTER) { QuerySpec::Vehicle.all(:links => [ 'factory', region ]) } + results.should have(1).entries + end + + it 'should only accept a DM::Assoc::Relationship, String & Symbol as a link' do + lambda{ + DataMapper::Query.new(repository(ADAPTER), QuerySpec::Vehicle, :links => [1]) + }.should raise_error(ArgumentError) + end + + it 'should have a association by the name of the Symbol or String' do + lambda{ + DataMapper::Query.new(repository(ADAPTER), QuerySpec::Vehicle, :links => [ 'Sailing' ]) + }.should raise_error(ArgumentError) + + lambda{ + DataMapper::Query.new(repository(ADAPTER), QuerySpec::Vehicle, :links => [ :sailing ]) + }.should raise_error(ArgumentError) + end + + it 'should create an n-level query path' do + QuerySpec::Vehicle.factory.region.model.should == QuerySpec::Region + QuerySpec::Vehicle.factory.region.name.property.should == 
QuerySpec::Region.properties(QuerySpec::Region.repository.name)[ :name ] + end + + it 'should accept a DM::QueryPath as the key to a condition' do + vehicle = QuerySpec::Vehicle.first(QuerySpec::Vehicle.factory.region.name => 'North West') + vehicle.name.should == '10 ton delivery truck' + + vehicle = Namespace::Vehicle.first(Namespace::Vehicle.factory.region.name => 'North West') + vehicle.name.should == '10 ton delivery truck' + end + + it "should accept a string representing a DM::QueryPath as they key to a condition" do + vehicle = QuerySpec::Vehicle.first("factory.region.name" => 'North West') + vehicle.name.should == '10 ton delivery truck' + end + + it "should accept 'id' and 'type' as endpoints on ah DM::QueryPath" do + vehicle = QuerySpec::Vehicle.first( QuerySpec::Vehicle.factory.region.type => 'commercial' ) + vehicle.name.should == '10 ton delivery truck' + vehicle = QuerySpec::Vehicle.first( QuerySpec::Vehicle.factory.region.id => 1 ) + vehicle.name.should == '10 ton delivery truck' + end + + it 'should auto generate the link if a DM::Property from a different resource is in the :fields option' + + it 'should create links with composite keys' + + it 'should eager load associations' do + repository(ADAPTER) do + vehicle = QuerySpec::Vehicle.first(:includes => [ QuerySpec::Vehicle.factory ]) + end + end + + it "should behave when using mocks" do + QuerySpec::Group.should_receive(:all).with(:order => [ :id.asc ]) + QuerySpec::Group.all(:order => [ :id.asc ]) + end + end # describe links + end # DM::Query +end diff --git a/vendor/dm-core-0.9.6/spec/integration/repository_spec.rb b/vendor/dm-core-0.9.6/spec/integration/repository_spec.rb new file mode 100644 index 0000000..73bc34a --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/repository_spec.rb @@ -0,0 +1,57 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if ADAPTER + describe DataMapper::Repository, "with #{ADAPTER}" do + before :all do + class 
SerialFinderSpec + include DataMapper::Resource + + property :id, Serial + property :sample, String + + auto_migrate!(ADAPTER) + end + + repository(ADAPTER).create((0...100).map { SerialFinderSpec.new(:sample => rand.to_s) }) + end + + before do + @repository = repository(ADAPTER) + @model = SerialFinderSpec + @query = DataMapper::Query.new(@repository, @model) + end + + it "should throw an exception if the named repository is unknown" do + r = DataMapper::Repository.new(:completely_bogus) + lambda { r.adapter }.should raise_error(ArgumentError) + end + + it "should return all available rows" do + @repository.read_many(@query).should have(100).entries + end + + it "should allow limit and offset" do + @repository.read_many(@query.merge(:limit => 50)).should have(50).entries + + collection = @repository.read_many(@query.merge(:limit => 20, :offset => 40)) + collection.should have(20).entries + collection.map { |entry| entry.id }.should == @repository.read_many(@query)[40...60].map { |entry| entry.id } + end + + it "should lazy-load missing attributes" do + sfs = @repository.read_one(@query.merge(:fields => [ :id ], :limit => 1)) + sfs.should be_a_kind_of(@model) + sfs.should_not be_a_new_record + + sfs.attribute_loaded?(:sample).should be_false + sfs.sample.should_not be_nil + end + + it "should translate an Array to an IN clause" do + ids = @repository.read_many(@query.merge(:fields => [ :id ], :limit => 10)).map { |entry| entry.id } + results = @repository.read_many(@query.merge(:id => ids)) + + results.map { |entry| entry.id }.should == ids + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/resource_spec.rb b/vendor/dm-core-0.9.6/spec/integration/resource_spec.rb new file mode 100644 index 0000000..be24d71 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/resource_spec.rb @@ -0,0 +1,475 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +# ------------------------------------------------------------ +# 
----- Read SPECS for information about how to read ----- +# ----- and contribute to the DataMapper specs. ----- +# ------------------------------------------------------------ + +if ADAPTER + describe "DataMapper::Resource with #{ADAPTER}" do + + load_models_for_metaphor :zoo + + before(:each) do + DataMapper.auto_migrate!(ADAPTER) + @zoo = Zoo.new(:name => "San Francisco") + repository(ADAPTER) { @zoo.save } + end + + # --- Move somewhere ---- + it "should be able to destroy objects" do + lambda { @zoo.destroy.should be_true }.should_not raise_error + end + + describe '#attribute_get' do + it 'should provide #attribute_get' do + Zoo.new.should respond_to(:attribute_get) + end + + it 'should delegate to Property#get' do + Zoo.properties[:name].should_receive(:get).with(zoo = Zoo.new) + zoo.name + end + + it "should return Property#get's return value" do + Zoo.properties[:name].should_receive(:get).and_return("San Francisco") + Zoo.new.name.should == "San Francisco" + end + end + + describe '#attribute_set' do + it "should provide #attribute_set" do + Zoo.new.should respond_to(:attribute_set) + end + + it 'should delegate to Property#set' do + Zoo.properties[:name].should_receive(:set).with(zoo = Zoo.new, "San Francisco") + zoo.name = "San Francisco" + end + end + + describe '#eql?' 
do + it "should return true if the objects are the same instances" + it "should return false if the other object is not an instance of the same model" + it "should return false if the other object is a different class" + it "should return true if the repositories are the same and the primary key is the same" + it "should return true if all the properties are the same" + it "should return false if any of the properties are different" + end + + describe '#hash' do + it "should return the same hash values for unsaved objects that are equal" do + e1 = Employee.new(:name => "John") + e2 = Employee.new(:name => "John") + e1.hash.should == e2.hash + end + + it "should return the same hash values for saved objects that are equal" do + # Make sure that the object_id's are not the same + e1 = e2 = nil + repository(ADAPTER) do + e1 = Employee.create(:name => "John") + end + repository(ADAPTER) do + e2 = Employee.get("John") + end + e1.hash.should == e2.hash + end + + it "should return a different hash value for different objects of the same type" do + repository(ADAPTER) do + e1 = Employee.create(:name => "John") + e2 = Employee.create(:name => "Dan") + e1.hash.should_not == e2.hash + end + end + + it "should return a different hash value for different types of objects with the same key" + end + + describe '#id' do + it "should be awesome" + end + + describe '#inspect' do + it "should return a string representing the object" + end + + describe '#key' do + describe "original_value[:key]" do + it "should be used when an existing resource's key changes" do + repository(ADAPTER) do + employee = Employee.create(:name => "John") + employee.name = "Jon" + employee.key.should == ["John"] + end + end + + it "should be used when saving an existing resource" do + repository(ADAPTER) do + employee = Employee.create(:name => "John") + employee.name = "Jon" + employee.save.should == true + Employee.get("Jon").should == employee + end + end + + it "should not be used when a new resource's 
key changes" do + employee = Employee.new(:name => "John") + employee.name = "Jon" + employee.key.should == ["Jon"] + end + end + end + + describe '#pretty_print' do + it "should display a pretty version of inspect" + end + + describe '#save' do + + describe 'with a new resource' do + it 'should set defaults before create' + it 'should create when dirty' + it 'should create when non-dirty, and it has a serial key' + end + + describe 'with an existing resource' do + it 'should update' + end + + end + + describe '#repository' do + it "should return the repository associated with the object if there is one" + it "should return the repository associated with the model if the object doesn't have one" + end + end +end + + + + +# ---------- Old specs... BOOOOOOOOOO --------------- +if ADAPTER + class Orange + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :name, String, :key => true + property :color, String + end + + class Apple + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :color, String, :default => 'green', :nullable => true + end + + class FortunePig + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :name, String + + def to_s + name + end + + after :create do + @created_id = self.id + end + + after :save do + @save_id = self.id + end + end + + class Car + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :brand, String, :key => true + property :color, String + property :created_on, Date + property :touched_on, Date + property :updated_on, Date + + before :save do + self.touched_on = Date.today + end + + before :create do + self.created_on = Date.today + end + + before :update do + self.updated_on = Date.today + end + end + + class Male + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property 
:id, Serial + property :name, String + property :iq, Integer, :default => 100 + property :type, Discriminator + property :data, Object + + def iq=(i) + attribute_set(:iq, i - 1) + end + end + + class Bully < Male; end + + class Mugger < Bully; end + + class Maniac < Bully; end + + class Psycho < Maniac; end + + class Geek < Male + property :awkward, Boolean, :default => true + + def iq=(i) + attribute_set(:iq, i + 30) + end + end + + class Flanimal + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :type, Discriminator + property :name, String + end + + class Sprog < Flanimal; end + + describe "DataMapper::Resource with #{ADAPTER}" do + before :all do + Orange.auto_migrate!(ADAPTER) + Apple.auto_migrate!(ADAPTER) + FortunePig.auto_migrate!(ADAPTER) + + orange = Orange.new(:color => 'orange') + orange.name = 'Bob' # Keys are protected from mass-assignment by default. + repository(ADAPTER) { orange.save } + end + + it "should be able to overwrite Resource#to_s" do + repository(ADAPTER) do + ted = FortunePig.create(:name => "Ted") + FortunePig.get!(ted.id).to_s.should == 'Ted' + end + end + + it "should be able to destroy objects" do + apple = Apple.create(:color => 'Green') + lambda do + apple.destroy.should be_true + end.should_not raise_error + end + + it 'should return false to #destroy if the resource is new' do + Apple.new.destroy.should be_false + end + + it "should be able to reload objects" do + orange = repository(ADAPTER) { Orange.get!('Bob') } + orange.color.should == 'orange' + orange.color = 'blue' + orange.color.should == 'blue' + orange.reload + orange.color.should == 'orange' + end + + it "should be able to reload new objects" do + repository(ADAPTER) do + Orange.create(:name => 'Tom').reload + end + end + + it "should be able to find first or create objects" do + repository(ADAPTER) do + orange = Orange.create(:name => 'Naval') + + Orange.first_or_create(:name => 'Naval').should == 
orange + + purple = Orange.first_or_create(:name => 'Purple', :color => 'Fuschia') + oranges = Orange.all(:name => 'Purple') + oranges.size.should == 1 + oranges.first.should == purple + end + end + + it "should be able to override a default with a nil" do + repository(ADAPTER) do + apple = Apple.new + apple.color = nil + apple.save + apple.color.should be_nil + + apple = Apple.create(:color => nil) + apple.color.should be_nil + end + end + + it "should be able to respond to create hooks" do + bob = repository(ADAPTER) { FortunePig.create(:name => 'Bob') } + bob.id.should_not be_nil + bob.instance_variable_get("@created_id").should == bob.id + + fred = FortunePig.new(:name => 'Fred') + repository(ADAPTER) { fred.save } + fred.id.should_not be_nil + fred.instance_variable_get("@save_id").should == fred.id + end + + it "should be dirty when Object properties are changed" do + # pending "Awaiting Property#track implementation" + repository(ADAPTER) do + Male.auto_migrate! + end + repository(ADAPTER) do + bob = Male.create(:name => "Bob", :data => {}) + bob.dirty?.should be_false + bob.data.merge!(:name => "Bob") + bob.dirty?.should be_true + bob = Male.first + bob.data[:name] = "Bob" + bob.dirty?.should be_true + end + end + + describe "hooking" do + before :all do + Car.auto_migrate!(ADAPTER) + end + + it "should execute hooks before creating/updating objects" do + repository(ADAPTER) do + c1 = Car.new(:brand => 'BMW', :color => 'white') + + c1.new_record?.should == true + c1.created_on.should == nil + + c1.save + + c1.new_record?.should == false + c1.touched_on.should == Date.today + c1.created_on.should == Date.today + c1.updated_on.should == nil + + c1.color = 'black' + c1.save + + c1.updated_on.should == Date.today + end + + end + + end + + describe "inheritance" do + before :all do + Geek.auto_migrate!(ADAPTER) + + repository(ADAPTER) do + Male.create(:name => 'John Dorian') + Bully.create(:name => 'Bob', :iq => 69) + Geek.create(:name => 'Steve', :awkward => 
false, :iq => 132) + Geek.create(:name => 'Bill', :iq => 150) + Bully.create(:name => 'Johnson') + Mugger.create(:name => 'Frank') + Maniac.create(:name => 'William') + Psycho.create(:name => 'Norman') + end + + Flanimal.auto_migrate!(ADAPTER) + + end + + it "should test bug ticket #302" do + repository(ADAPTER) do + Sprog.create(:name => 'Marty') + Sprog.first(:name => 'Marty').should_not be_nil + end + end + + it "should select appropriate types" do + repository(ADAPTER) do + males = Male.all + males.should have(8).entries + + males.each do |male| + male.class.name.should == male.type.name + end + + Male.first(:name => 'Steve').should be_a_kind_of(Geek) + Bully.first(:name => 'Bob').should be_a_kind_of(Bully) + Geek.first(:name => 'Steve').should be_a_kind_of(Geek) + Geek.first(:name => 'Bill').should be_a_kind_of(Geek) + Bully.first(:name => 'Johnson').should be_a_kind_of(Bully) + Male.first(:name => 'John Dorian').should be_a_kind_of(Male) + end + end + + it "should not select parent type" do + repository(ADAPTER) do + Male.first(:name => 'John Dorian').should be_a_kind_of(Male) + Geek.first(:name => 'John Dorian').should be_nil + Geek.first.iq.should > Bully.first.iq + end + end + + it "should select objects of all inheriting classes" do + repository(ADAPTER) do + Male.all.should have(8).entries + Geek.all.should have(2).entries + Bully.all.should have(5).entries + Mugger.all.should have(1).entries + Maniac.all.should have(2).entries + Psycho.all.should have(1).entries + end + end + + it "should inherit setter method from parent" do + repository(ADAPTER) do + Bully.first(:name => "Bob").iq.should == 68 + end + end + + it "should be able to overwrite a setter in a child class" do + repository(ADAPTER) do + Geek.first(:name => "Bill").iq.should == 180 + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/sqlite3_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/integration/sqlite3_adapter_spec.rb new file mode 100644 index 0000000..1d40116 
--- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/sqlite3_adapter_spec.rb @@ -0,0 +1,352 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +if HAS_SQLITE3 + describe DataMapper::Adapters::Sqlite3Adapter do + before :all do + @adapter = repository(:sqlite3).adapter + end + + describe "auto migrating" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + end + + it "#upgrade_model should work" do + @adapter.destroy_model_storage(repository(:sqlite3), Sputnik) + @adapter.storage_exists?("sputniks").should == false + Sputnik.auto_migrate!(:sqlite3) + @adapter.storage_exists?("sputniks").should == true + @adapter.field_exists?("sputniks", "new_prop").should == false + Sputnik.property :new_prop, Integer + Sputnik.auto_upgrade!(:sqlite3) + @adapter.field_exists?("sputniks", "new_prop").should == true + end + end + + describe "querying metadata" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + end + + before do + Sputnik.auto_migrate!(:sqlite3) + end + + it "#storage_exists? should return true for tables that exist" do + @adapter.storage_exists?("sputniks").should == true + end + + it "#storage_exists? should return false for tables that don't exist" do + @adapter.storage_exists?("space turds").should == false + end + + it "#field_exists? should return true for columns that exist" do + @adapter.field_exists?("sputniks", "name").should == true + end + + it "#storage_exists? 
should return false for tables that don't exist" do + @adapter.field_exists?("sputniks", "plur").should == false + end + end + + describe "database file handling" do + it "should preserve the file path for file-based databases" do + file = 'newfile.db' + DataMapper.setup(:sqlite3file, "sqlite3:#{file}") + adapter = repository(:sqlite3file).adapter + adapter.uri.path.should == file + end + + it "should have a path of just :memory: when using memory databases" do + DataMapper.setup(:sqlite3memory, "sqlite3::memory:") + adapter = repository(:sqlite3memory).adapter + adapter.uri.path.should == ':memory:' + end + end + + describe "handling transactions" do + before :all do + class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + end + + before do + Sputnik.auto_migrate!(:sqlite3) + + @transaction = DataMapper::Transaction.new(@adapter) + end + + it "should rollback changes when #rollback_transaction is called" do + @transaction.commit do |transaction| + @adapter.execute("INSERT INTO sputniks (name) VALUES ('my pretty sputnik')") + transaction.rollback + end + @adapter.query("SELECT * FROM sputniks WHERE name = 'my pretty sputnik'").empty?.should == true + end + + it "should commit changes when #commit_transaction is called" do + @transaction.commit do + @adapter.execute("INSERT INTO sputniks (name) VALUES ('my pretty sputnik')") + end + @adapter.query("SELECT * FROM sputniks WHERE name = 'my pretty sputnik'").size.should == 1 + end + end + + describe "reading & writing a database" do + before :all do + class User + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + end + + before do + User.auto_migrate!(:sqlite3) + + @adapter.execute("INSERT INTO users (name) VALUES ('Paul')") + end + + it 'should be able to #execute an arbitrary query' do + result = @adapter.execute("INSERT INTO users (name) VALUES ('Sam')") + + result.affected_rows.should == 1 + end + + it 'should be able to 
#query' do + result = @adapter.query("SELECT * FROM users") + + result.should be_kind_of(Array) + row = result.first + row.should be_kind_of(Struct) + row.members.should == %w{id name} + + row.id.should == 1 + row.name.should == 'Paul' + end + + it 'should return an empty array if #query found no rows' do + @adapter.execute("DELETE FROM users") + + result = nil + lambda { result = @adapter.query("SELECT * FROM users") }.should_not raise_error + + result.should be_kind_of(Array) + result.size.should == 0 + end + end + + describe "CRUD for serial Key" do + before :all do + class VideoGame + include DataMapper::Resource + + property :id, Serial + property :name, String + property :object, Object + property :notes, Text + end + end + + before do + VideoGame.auto_migrate!(:sqlite3) + end + + it 'should be able to create a record' do + time = Time.now + game = repository(:sqlite3) do + game = VideoGame.new(:name => 'System Shock', :object => time, :notes => "Test") + game.save + game.should_not be_a_new_record + game.should_not be_dirty + game + end + repository(:sqlite3) do + saved = VideoGame.first(:name => 'System Shock') + saved.id.should == game.id + saved.notes.should == game.notes + saved.object.should == time + end + end + + it 'should be able to read a record' do + name = 'Wing Commander: Privateer' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?)', name).insert_id + + game = repository(:sqlite3) do + VideoGame.get(id) + end + + game.name.should == name + game.should_not be_dirty + game.should_not be_a_new_record + end + + it 'should be able to update a record' do + name = 'Resistance: Fall of Mon' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?)', name).insert_id + + game = repository(:sqlite3) do + VideoGame.get(id) + end + + game.name = game.name.sub(/Mon/, 'Man') + + game.should_not be_a_new_record + game.should be_dirty + + repository(:sqlite3) do + game.save + end + + game.should_not be_dirty + + clone = 
repository(:sqlite3) do + VideoGame.get(id) + end + + clone.name.should == game.name + end + + it 'should be able to delete a record' do + name = 'Zelda' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?)', name).insert_id + + game = repository(:sqlite3) do + VideoGame.get(id) + end + + game.name.should == name + + repository(:sqlite3) do + game.destroy.should be_true + end + game.should be_a_new_record + game.should be_dirty + end + + it 'should respond to Resource#get' do + name = 'Contra' + id = @adapter.execute('INSERT INTO "video_games" ("name") VALUES (?)', name).insert_id + + contra = repository(:sqlite3) { VideoGame.get(id) } + + contra.should_not be_nil + contra.should_not be_dirty + contra.should_not be_a_new_record + contra.id.should == id + end + end + + describe "CRUD for Composite Key" do + before :all do + class BankCustomer + include DataMapper::Resource + + property :bank, String, :key => true + property :account_number, String, :key => true + property :name, String + end + end + + before do + BankCustomer.auto_migrate!(:sqlite3) + end + + it 'should be able to create a record' do + customer = BankCustomer.new(:bank => 'Community Bank', :account_number => '123456', :name => 'David Hasselhoff') + repository(:sqlite3) do + customer.save + end + + customer.should_not be_a_new_record + customer.should_not be_dirty + + row = @adapter.query('SELECT "bank", "account_number" FROM "bank_customers" WHERE "name" = ?', customer.name).first + row.bank.should == customer.bank + row.account_number.should == customer.account_number + end + + it 'should be able to read a record' do + bank, account_number, name = 'Chase', '4321', 'Super Wonderful' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + repository(:sqlite3) do + BankCustomer.get(bank, account_number).name.should == name + end + end + + it 'should be able to update a record' do + bank, account_number, 
name = 'Wells Fargo', '00101001', 'Spider Pig' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + customer = repository(:sqlite3) do + BankCustomer.get(bank, account_number) + end + + customer.name = 'Bat-Pig' + + customer.should_not be_a_new_record + customer.should be_dirty + + repository(:sqlite3) do + customer.save + end + + customer.should_not be_dirty + + clone = repository(:sqlite3) do + BankCustomer.get(bank, account_number) + end + + clone.name.should == customer.name + end + + it 'should be able to delete a record' do + bank, account_number, name = 'Megacorp', 'ABC', 'Flash Gordon' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + customer = repository(:sqlite3) do + BankCustomer.get(bank, account_number) + end + + customer.name.should == name + + repository(:sqlite3) do + customer.destroy.should be_true + end + + customer.should be_a_new_record + customer.should be_dirty + end + + it 'should respond to Resource#get' do + bank, account_number, name = 'Conchords', '1100101', 'Robo Boogie' + @adapter.execute('INSERT INTO "bank_customers" ("bank", "account_number", "name") VALUES (?, ?, ?)', bank, account_number, name) + + robots = repository(:sqlite3) { BankCustomer.get(bank, account_number) } + + robots.should_not be_nil + robots.should_not be_dirty + robots.should_not be_a_new_record + robots.bank.should == bank + robots.account_number.should == account_number + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/sti_spec.rb b/vendor/dm-core-0.9.6/spec/integration/sti_spec.rb new file mode 100644 index 0000000..ffc9d5d --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/sti_spec.rb @@ -0,0 +1,230 @@ +require 'pathname' +require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper' + +if HAS_SQLITE3 + describe DataMapper::AutoMigrations, '.auto_migrate! 
on STI models with sqlite3' do + before :all do + @adapter = repository(:sqlite3).adapter + + @property_class = Struct.new(:name, :type, :nullable, :default, :serial) + + class Book + include DataMapper::Resource + + property :id, Serial + property :title, String, :nullable => false + property :isbn, Integer, :nullable => false + property :class_type, Discriminator + end + + class Propaganda < Book + property :marxist, Boolean, :nullable => false, :default => false + end + + class Fiction < Book + property :series, String + end + + class ShortStory < Fiction + property :moral, String + end + + class ScienceFiction < Fiction + property :aliens, Boolean + end + + class SpaceWestern < ScienceFiction + property :cowboys, Boolean + end + end + + describe "with the identity map" do + before :all do + Book.auto_migrate!(:sqlite3) + repository(:sqlite3) do + Propaganda.create(:title => "Something", :isbn => "129038") + end + end + + it "should find the base model in the identity map" do + repository(:sqlite3) do + book = Book.first + book.object_id.should == Propaganda.first.object_id + end + end + + it "should find the child model in the identity map" do + repository(:sqlite3) do + book = Propaganda.first + book.object_id.should == Book.first.object_id + end + end + end + + describe "with a parent class" do + before :all do + Book.auto_migrate!(:sqlite3).should be_true + + @table_set = @adapter.query('PRAGMA table_info("books")').inject({}) do |ts,column| + default = if 'NULL' == column.dflt_value || column.dflt_value.nil? 
+ nil + else + /^(['"]?)(.*)\1$/.match(column.dflt_value)[2] + end + + property = @property_class.new( + column.name, + column.type.upcase, + column.notnull == 0, + default, + column.pk == 1 # in SQLite3 the serial key is also primary + ) + + ts.update(property.name => property) + end + + @index_list = @adapter.query('PRAGMA index_list("books")') + end + + it "should create the child class property columns" do + @table_set.keys.should include("series", "marxist") + end + + it "should create all property columns of the child classes in the inheritance tree" do + @table_set.keys.should include("moral") + end + end + + describe "with a child class" do + before :all do + Propaganda.auto_migrate!(:sqlite3).should be_true + + @table_set = @adapter.query('PRAGMA table_info("books")').inject({}) do |ts,column| + default = if 'NULL' == column.dflt_value || column.dflt_value.nil? + nil + else + /^(['"]?)(.*)\1$/.match(column.dflt_value)[2] + end + + property = @property_class.new( + column.name, + column.type.upcase, + column.notnull == 0, + default, + column.pk == 1 # in SQLite3 the serial key is also primary + ) + + ts.update(property.name => property) + end + + @index_list = @adapter.query('PRAGMA index_list("books")') + end + + it "should create the parent class' property columns" do + @table_set.keys.should include("id", "title", "isbn") + end + end + + describe "with a child class with it's own child class" do + before :all do + Fiction.auto_migrate!(:sqlite3).should be_true + + @table_set = @adapter.query('PRAGMA table_info("books")').inject({}) do |ts,column| + default = if 'NULL' == column.dflt_value || column.dflt_value.nil? 
+ nil + else + /^(['"]?)(.*)\1$/.match(column.dflt_value)[2] + end + + property = @property_class.new( + column.name, + column.type.upcase, + column.notnull == 0, + default, + column.pk == 1 # in SQLite3 the serial key is also primary + ) + + ts.update(property.name => property) + end + + @index_list = @adapter.query('PRAGMA index_list("books")') + end + + it "should create the parent class' property columns" do + @table_set.keys.should include("id", "title", "isbn") + end + + it "should create the child class' property columns" do + @table_set.keys.should include("moral") + end + end + + describe "with a nephew class" do + before :all do + ShortStory.auto_migrate!(:sqlite3).should be_true + + @table_set = @adapter.query('PRAGMA table_info("books")').inject({}) do |ts,column| + default = if 'NULL' == column.dflt_value || column.dflt_value.nil? + nil + else + /^(['"]?)(.*)\1$/.match(column.dflt_value)[2] + end + + property = @property_class.new( + column.name, + column.type.upcase, + column.notnull == 0, + default, + column.pk == 1 # in SQLite3 the serial key is also primary + ) + + ts.update(property.name => property) + end + @index_list = @adapter.query('PRAGMA index_list("books")') + end + + + it "should create the grandparent class' property columns" do + @table_set.keys.should include("id", "title", "isbn") + end + + it "should create the uncle class' property columns" do + @table_set.keys.should include("marxist") + end + end + + describe "with a great-grandchild class" do + it "should inherit its parent's properties" do + SpaceWestern.properties[:aliens].should_not be_nil + end + it "should inherit its grandparent's properties" do + SpaceWestern.properties[:series].should_not be_nil + end + it "should inherit its great-granparent's properties" do + SpaceWestern.properties[:title].should_not be_nil + end + end + + describe "with a child class" do + before :all do + Book.auto_migrate!(:sqlite3) + repository(:sqlite3) do + ShortStory.create( + :title => "The 
Science of Happiness", + :isbn => "129038", + :moral => "Bullshit might get you to the top, but it won't keep you there.") + end + end + + it "should be able to access the properties from the parent collection" do + repository(:sqlite3) do + Book.all.each do |book| + book.title.should_not be_nil + book.isbn.should_not be_nil + book.moral.should_not be_nil + end + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/strategic_eager_loading_spec.rb b/vendor/dm-core-0.9.6/spec/integration/strategic_eager_loading_spec.rb new file mode 100644 index 0000000..a5e21e8 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/strategic_eager_loading_spec.rb @@ -0,0 +1,153 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "Strategic Eager Loading" do + include LoggingHelper + + before :all do + class Zoo + include DataMapper::Resource + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + + has n, :exhibits + end + + class Exhibit + include DataMapper::Resource + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + property :zoo_id, Integer + + belongs_to :zoo + has n, :animals + end + + class Animal + include DataMapper::Resource + def self.default_repository_name; ADAPTER end + + property :id, Serial + property :name, String + property :exhibit_id, Integer + + belongs_to :exhibit + end + + [Zoo, Exhibit, Animal].each { |k| k.auto_migrate!(ADAPTER) } + + repository(ADAPTER) do + Zoo.create(:name => "Dallas Zoo") + Exhibit.create(:name => "Primates", :zoo_id => 1) + Animal.create(:name => "Chimpanzee", :exhibit_id => 1) + Animal.create(:name => "Orangutan", :exhibit_id => 1) + + Zoo.create(:name => "San Diego") + Exhibit.create(:name => "Aviary", :zoo_id => 2) + Exhibit.create(:name => "Insectorium", :zoo_id => 2) + Exhibit.create(:name => "Bears", :zoo_id => 2) + Animal.create(:name => "Bald Eagle", :exhibit_id => 
2) + Animal.create(:name => "Parakeet", :exhibit_id => 2) + Animal.create(:name => "Roach", :exhibit_id => 3) + Animal.create(:name => "Brown Bear", :exhibit_id => 4) + end + end + + it "should eager load children" do + zoo_ids = Zoo.all.map { |z| z.key } + exhibit_ids = Exhibit.all.map { |e| e.key } + + repository(ADAPTER) do + zoos = Zoo.all.entries # load all zoos + dallas = zoos.find { |z| z.name == 'Dallas Zoo' } + + logger do |log| + dallas.exhibits.entries # load all exhibits for zoos in identity_map + dallas.exhibits.size.should == 1 + log.readlines.size.should == 1 + end + + repository.identity_map(Zoo).keys.sort.should == zoo_ids + repository.identity_map(Exhibit).keys.sort.should == exhibit_ids + + logger do |log| + zoos.each { |zoo| zoo.exhibits.entries } # issues no queries + log.readlines.should be_empty + end + + dallas.exhibits << Exhibit.new(:name => "Reptiles") + dallas.exhibits.size.should == 2 + dallas.save + end + repository(ADAPTER) do + Zoo.first.exhibits.size.should == 2 + end + end + + it "should not eager load children when a query is provided" do + repository(ADAPTER) do + dallas = Zoo.all.entries.find { |z| z.name == 'Dallas Zoo' } + exhibits = dallas.exhibits.entries # load all exhibits + + reptiles, primates = nil, nil + + logger do |log| + reptiles = dallas.exhibits(:name => 'Reptiles') + reptiles.size.should == 1 + log.readlines.size.should == 1 + end + + logger do |log| + primates = dallas.exhibits(:name => 'Primates') + primates.size.should == 1 + log.readlines.size.should == 1 + end + + primates.should_not == reptiles + end + end + + it "should eager load parents" do + animal_ids = Animal.all.map { |a| a.key } + exhibit_ids = Exhibit.all.map { |e| e.key }.sort + exhibit_ids.pop # remove Reptile exhibit, which has no Animals + + repository(ADAPTER) do + animals = Animal.all.entries + bear = animals.find { |a| a.name == 'Brown Bear' } + + logger do |log| + bear.exhibit + log.readlines.size.should == 1 + end + + 
repository.identity_map(Animal).keys.sort.should == animal_ids + repository.identity_map(Exhibit).keys.sort.should == exhibit_ids + end + end + + it "should not eager load parents when parent is in IM" do + repository(ADAPTER) do + animal = Animal.first + exhibit = Exhibit.get(1) # load exhibit into IM + + logger do |log| + animal.exhibit # load exhibit from IM + log.readlines.should be_empty + end + + repository.identity_map(Exhibit).keys.should == [exhibit.key] + end + end + + it "should return a Collection when no children" do + Zoo.create(:name => 'Portland') + + Zoo.all.each do |zoo| + zoo.exhibits.should be_kind_of(DataMapper::Collection) + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/transaction_spec.rb b/vendor/dm-core-0.9.6/spec/integration/transaction_spec.rb new file mode 100644 index 0000000..5b4f464 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/transaction_spec.rb @@ -0,0 +1,75 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +# transaction capable adapters +ADAPTERS = [] +ADAPTERS << :postgres if HAS_POSTGRES +ADAPTERS << :mysql if HAS_MYSQL +ADAPTERS << :sqlite3 if HAS_SQLITE3 + +if ADAPTERS.any? 
+ class Sputnik + include DataMapper::Resource + + property :id, Serial + property :name, DM::Text + end + + describe DataMapper::Transaction do + before :all do + @repositories = [] + + ADAPTERS.each do |name| + @repositories << repository(name) + end + end + + before :each do + ADAPTERS.each do |name| + Sputnik.auto_migrate!(name) + end + end + + it "should commit changes to all involved adapters on a two phase commit" do + DataMapper::Transaction.new(*@repositories) do + ADAPTERS.each do |name| + repository(name) { Sputnik.create(:name => 'hepp') } + end + end + + ADAPTERS.each do |name| + repository(name) { Sputnik.all.size.should == 1 } + end + end + + it "should not commit any changes if the block raises an exception" do + lambda do + DataMapper::Transaction.new(*@repositories) do + ADAPTERS.each do |name| + repository(name) { Sputnik.create(:name => 'hepp') } + end + raise "plur" + end + end.should raise_error(Exception, /plur/) + + ADAPTERS.each do |name| + repository(name) { Sputnik.all.size.should == 0 } + end + end + + it "should not commit any changes if any of the adapters doesnt prepare properly" do + lambda do + DataMapper::Transaction.new(*@repositories) do |transaction| + ADAPTERS.each do |name| + repository(name) { Sputnik.create(:name => 'hepp') } + end + + transaction.primitive_for(@repositories.last.adapter).should_receive(:prepare).and_throw(Exception.new("I am the famous test exception")) + end + end.should raise_error(Exception, /I am the famous test exception/) + + ADAPTERS.each do |name| + repository(name) { Sputnik.all.size.should == 0 } + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/integration/type_spec.rb b/vendor/dm-core-0.9.6/spec/integration/type_spec.rb new file mode 100644 index 0000000..84c42fa --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/integration/type_spec.rb @@ -0,0 +1,271 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +gem 'fastercsv', '>=1.2.3' +require 'fastercsv' + 
+if ADAPTER + module TypeTests + class Impostor < DataMapper::Type + primitive String + end + + class Coconut + include DataMapper::Resource + + storage_names[ADAPTER] = 'coconuts' + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :faked, Impostor + property :active, Boolean + property :note, Text + end + end + + class Lemon + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :color, String + property :deleted_at, DataMapper::Types::ParanoidDateTime + end + + class Lime + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :color, String + property :deleted_at, DataMapper::Types::ParanoidBoolean + end + + describe DataMapper::Type, "with #{ADAPTER}" do + before do + TypeTests::Coconut.auto_migrate!(ADAPTER) + + @document = <<-EOS.margin + NAME, RATING, CONVENIENCE + Freebird's, 3, 3 + Whataburger, 1, 5 + Jimmy John's, 3, 4 + Mignon, 5, 2 + Fuzi Yao's, 5, 1 + Blue Goose, 5, 1 + EOS + + @stuff = YAML::dump({ 'Happy Cow!' => true, 'Sad Cow!' 
=> false }) + + @active = true + @note = "This is a note on our ol' guy bob" + end + + it "should instantiate an object with custom types" do + coconut = TypeTests::Coconut.new(:faked => 'bob', :active => @active, :note => @note) + coconut.faked.should == 'bob' + coconut.active.should be_a_kind_of(TrueClass) + coconut.note.should be_a_kind_of(String) + end + + it "should CRUD an object with custom types" do + repository(ADAPTER) do + coconut = TypeTests::Coconut.new(:faked => 'bob', :active => @active, :note => @note) + coconut.save.should be_true + coconut.id.should_not be_nil + + fred = TypeTests::Coconut.get!(coconut.id) + fred.faked.should == 'bob' + fred.active.should be_a_kind_of(TrueClass) + fred.note.should be_a_kind_of(String) + + note = "Seems like bob is just mockin' around" + fred.note = note + + fred.save.should be_true + + active = false + fred.active = active + + fred.save.should be_true + + # Can't call coconut.reload since coconut.collection isn't setup. + mac = TypeTests::Coconut.get!(fred.id) + mac.active.should == active + mac.note.should == note + end + end + + it "should respect paranoia with a datetime" do + Lemon.auto_migrate!(ADAPTER) + + lemon = nil + + repository(ADAPTER) do |repository| + lemon = Lemon.new + lemon.color = 'green' + + lemon.save + lemon.destroy + + lemon.deleted_at.should be_kind_of(DateTime) + end + + repository(ADAPTER) do |repository| + Lemon.all.should be_empty + Lemon.get(lemon.id).should be_nil + end + end + + it "should provide access to paranoid items with DateTime" do + Lemon.auto_migrate!(ADAPTER) + + lemon = nil + + repository(ADAPTER) do |repository| + %w(red green yellow blue).each do |color| + Lemon.create(:color => color) + end + + Lemon.all.size.should == 4 + Lemon.first.destroy + Lemon.all.size.should == 3 + Lemon.with_deleted{Lemon.all.size.should == 1} + end + end + + it "should set paranoid datetime to a date time" do + tmp = (DateTime.now - 0.5) + dt = DateTime.now + 
DateTime.stub!(:now).and_return(tmp) + + repository(ADAPTER) do |repository| + lemon = Lemon.new + lemon.color = 'green' + lemon.save + lemon.destroy + lemon.deleted_at.should == tmp + end + end + + it "should respect paranoia with a boolean" do + Lime.auto_migrate!(ADAPTER) + + lime = nil + + repository(ADAPTER) do |repository| + lime = Lime.new + lime.color = 'green' + + lime.save + lime.destroy + + lime.deleted_at.should be_kind_of(TrueClass) + end + + repository(ADAPTER) do |repository| + Lime.all.should be_empty + Lime.get(lime.id).should be_nil + end + end + + it "should provide access to paranoid items with Boolean" do + Lime.auto_migrate!(ADAPTER) + + lemon = nil + + repository(ADAPTER) do |repository| + %w(red green yellow blue).each do |color| + Lime.create(:color => color) + end + + Lime.all.size.should == 4 + Lime.first.destroy + Lime.all.size.should == 3 + Lime.with_deleted{Lime.all.size.should == 1} + end + end + + describe "paranoid types across repositories" do + before(:all) do + DataMapper::Repository.adapters[:alternate_paranoid] = repository(ADAPTER).adapter.dup + + Object.send(:remove_const, :Orange) if defined?(Orange) + class Orange + include DataMapper::Resource + + def self.default_repository_name + ADAPTER + end + + property :id, Serial + property :color, String + + repository(:alternate_paranoid) do + property :deleted, DataMapper::Types::ParanoidBoolean + property :deleted_at, DataMapper::Types::ParanoidDateTime + end + end + + repository(:alternate_paranoid){Orange.auto_migrate!} + end + + before(:each) do + %w(red orange blue green).each{|color| o = Orange.create(:color => color)} + end + + after(:each) do + Orange.repository.adapter.execute("DELETE FROM oranges") + end + + it "should setup the correct objects for the spec" do + repository(:alternate_paranoid){Orange.all.should have(4).items} + end + + it "should allow access the the default repository" do + Orange.all.should have(4).items + end + + it "should mark the objects as 
deleted in the alternate_paranoid repository" do + repository(:alternate_paranoid) do + Orange.first.destroy + Orange.all.should have(3).items + Orange.find_by_sql("SELECT * FROM oranges").should have(4).items + end + end + + it "should mark the objects as deleted in the alternate_paranoid repository but ignore it in the #{ADAPTER} repository" do + repository(:alternate_paranoid) do + Orange.first.destroy + end + Orange.all.should have(4).items + end + + it "should raise an error when trying to destroy from a repository that is not paranoid" do + lambda do + Orange.first.destroy + end.should raise_error(ArgumentError) + end + + it "should set all paranoid attributes on delete" do + repository(:alternate_paranoid) do + orange = Orange.first + orange.deleted.should be_false + orange.deleted_at.should be_nil + orange.destroy + + orange.deleted.should be_true + orange.deleted_at.should be_a_kind_of(DateTime) + end + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/lib/logging_helper.rb b/vendor/dm-core-0.9.6/spec/lib/logging_helper.rb new file mode 100644 index 0000000..6e14fc0 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/lib/logging_helper.rb @@ -0,0 +1,18 @@ +module LoggingHelper + def logger(adapter = ADAPTER, &block) + current_adapter = DataObjects.const_get(repository(adapter).adapter.uri.scheme.capitalize) + old_logger = current_adapter.logger + + log_path = File.join(SPEC_ROOT, "tmp.log") + handle = File.open(log_path, "a+") + current_adapter.logger = DataObjects::Logger.new(log_path, 0) + begin + yield(handle) + ensure + handle.truncate(0) + handle.close + current_adapter.logger = old_logger + File.delete(log_path) + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/lib/mock_adapter.rb b/vendor/dm-core-0.9.6/spec/lib/mock_adapter.rb new file mode 100644 index 0000000..2059d0a --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/lib/mock_adapter.rb @@ -0,0 +1,27 @@ +module DataMapper + module Adapters + class MockAdapter < 
DataMapper::Adapters::DataObjectsAdapter + + def create(resources) + 1 + end + + def exists?(storage_name) + true + end + + end + end +end + +module DataObjects + module Mock + + def self.logger + end + + def self.logger=(value) + end + + end +end diff --git a/vendor/dm-core-0.9.6/spec/lib/model_loader.rb b/vendor/dm-core-0.9.6/spec/lib/model_loader.rb new file mode 100644 index 0000000..9a6bbe7 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/lib/model_loader.rb @@ -0,0 +1,100 @@ +# --- +# Overview +# ======== +# ModelLoader is a method for loading methods models for specs in a way +# that will ensure that each spec will be an a pristine state when run. +# +# The problem is that if a spec needs to modify a model, the modifications +# should not carry over to the next spec. As such, all models are +# destroyed at the end of the spec and reloaded at the start. +# +# The second problem is that DataMapper::Resource keeps track +# of every class that it is included in. This is used for automigration. +# A number of specs run automigrate, and we don't want all the classes +# that were defined in other specs to be migrated as well. +# +# Usage +# ===== +# +# Sets the specified model metaphors to be loaded before each spec and +# destroyed after each spec in the current example group. This method +# can be used in a describe block or in a before block. 
+# +# ==== Parameters +# *metaphor:: The name of the metaphor to load (this is just the filename of +# file in specs/models) +# +# ==== Example +# +# describe "DataMapper::Associations" do +# +# load_models_for_metaphor :zoo, :blog +# +# it "should be awesome" do +# Zoo.new.should be_awesome +# end +# end +module ModelLoader + + def self.included(base) + base.extend(ClassMethods) + base.class_eval { include InstanceMethods } + # base.before(:each) { load_models(:global) } + base.after(:each) { unload_models } + end + + module ClassMethods + + def load_models_for_metaphor(*metaphors) + before(:each) { load_models_for_metaphor(*metaphors) } + end + + end + + module InstanceMethods + + def load_models_for_metaphor(*metaphors) + files = metaphors.map { |m| DataMapper.root / "spec" / "models" / "#{m}.rb" } + + klasses = object_space_classes.dup + files.each { |file| load file } + loaded_models.concat(object_space_classes - klasses) + end + + def unload_models + while model = loaded_models.pop + remove_model(model) + end + end + + def loaded_models + @loaded_models ||= [] + end + + private + + def object_space_classes + klasses = [] + ObjectSpace.each_object(Class) {|o| klasses << o} + klasses + end + + def remove_model(klass) + DataMapper::Resource.descendants.delete(klass) + # Check to see if the model is living inside a module + klass_name = klass.to_s + if klass_name.index("::") + mod = klass_name.match(/(\S+)::/)[1] + child_class = klass_name.match(/\S+::(\S+)/)[1] + + Object.const_get(mod).module_eval { remove_const child_class } + else + Object.module_eval { remove_const klass.to_s } + end + end + end +end + +Spec::Runner.configure do |config| + config.include(ModelLoader) +end diff --git a/vendor/dm-core-0.9.6/spec/lib/publicize_methods.rb b/vendor/dm-core-0.9.6/spec/lib/publicize_methods.rb new file mode 100644 index 0000000..c91f162 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/lib/publicize_methods.rb @@ -0,0 +1,28 @@ +class Class + def publicize_methods + 
klass = class << self; self; end + + saved_private_class_methods = klass.private_instance_methods + saved_protected_class_methods = klass.protected_instance_methods + saved_private_instance_methods = self.private_instance_methods + saved_protected_instance_methods = self.protected_instance_methods + + self.class_eval do + klass.send(:public, *saved_private_class_methods) + klass.send(:public, *saved_protected_class_methods) + public(*saved_private_instance_methods) + public(*saved_protected_instance_methods) + end + + begin + yield + ensure + self.class_eval do + klass.send(:private, *saved_private_class_methods) + klass.send(:protected, *saved_protected_class_methods) + private(*saved_private_instance_methods) + protected(*saved_protected_instance_methods) + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/models/content.rb b/vendor/dm-core-0.9.6/spec/models/content.rb new file mode 100644 index 0000000..c2decd2 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/models/content.rb @@ -0,0 +1,16 @@ +module Content + class Dialect + include DataMapper::Resource + + property :id, Serial + property :name, String + property :code, String + end + + class Locale + include DataMapper::Resource + + property :id, Serial + property :name, String + end +end diff --git a/vendor/dm-core-0.9.6/spec/models/vehicles.rb b/vendor/dm-core-0.9.6/spec/models/vehicles.rb new file mode 100644 index 0000000..a593a9e --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/models/vehicles.rb @@ -0,0 +1,34 @@ +# ========================== +# Used for Association specs +# --- +# These models will probably +# end up removed. 
So, I wouldn't +# use this metaphor +class Vehicle + include DataMapper::Resource + + property :id, Serial + property :name, String + + class << self + attr_accessor :mock_relationship + end +end + +class Manufacturer + include DataMapper::Resource + + property :id, Serial + property :name, String + + class << self + attr_accessor :mock_relationship + end +end + +class Supplier + include DataMapper::Resource + + property :id, Serial + property :name, String +end diff --git a/vendor/dm-core-0.9.6/spec/models/zoo.rb b/vendor/dm-core-0.9.6/spec/models/zoo.rb new file mode 100644 index 0000000..aaf32dc --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/models/zoo.rb @@ -0,0 +1,47 @@ +class Zoo + include DataMapper::Resource + + property :id, Serial + property :name, String + property :description, Text + property :inception, DateTime + property :open, Boolean, :default => false + property :size, Integer + + has n, :animals + + def to_s + name + end +end + +class Species + include DataMapper::Resource + + property :id, Serial + property :name, String + property :classification, String, :reader => :private + + has n, :animals +end + +class Animal + include DataMapper::Resource + + property :id, Serial + property :name, String + + belongs_to :zoo + belongs_to :species + belongs_to :keeper +end + +class Employee + include DataMapper::Resource + + property :name, String, :key => true +end + +class Keeper < Employee + has n, :animals +end diff --git a/vendor/dm-core-0.9.6/spec/spec.opts b/vendor/dm-core-0.9.6/spec/spec.opts new file mode 100644 index 0000000..45993bb --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/spec.opts @@ -0,0 +1,3 @@ +--colour +--loadby random +--format progress diff --git a/vendor/dm-core-0.9.6/spec/spec_helper.rb b/vendor/dm-core-0.9.6/spec/spec_helper.rb new file mode 100644 index 0000000..3137b66 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/spec_helper.rb @@ -0,0 +1,86 @@ +require 'rubygems' +gem 'rspec', '>=1.1.3' +require 'spec' +require 'pathname' + 
+SPEC_ROOT = Pathname(__FILE__).dirname.expand_path +require SPEC_ROOT.parent + 'lib/dm-core' + +# Load the various helpers for the spec suite +Dir[DataMapper.root / 'spec' / 'lib' / '*.rb'].each do |file| + require file +end + +# setup mock adapters +DataMapper.setup(:default, "sqlite3::memory:") +DataMapper.setup(:default2, "sqlite3::memory:") + +[ :mock, :legacy, :west_coast, :east_coast ].each do |repository_name| + DataMapper.setup(repository_name, "mock://localhost/#{repository_name}") +end + +# These environment variables will override the default connection string: +# MYSQL_SPEC_URI +# POSTGRES_SPEC_URI +# SQLITE3_SPEC_URI +# +# For example, in the bash shell, you might use: +# export MYSQL_SPEC_URI="mysql://localhost/dm_core_test?socket=/opt/local/var/run/mysql5/mysqld.sock" +# +def setup_adapter(name, default_uri) + begin + DataMapper.setup(name, ENV["#{name.to_s.upcase}_SPEC_URI"] || default_uri) + Object.const_set('ADAPTER', ENV['ADAPTER'].to_sym) if name.to_s == ENV['ADAPTER'] + true + rescue Exception => e + if name.to_s == ENV['ADAPTER'] + Object.const_set('ADAPTER', nil) + warn "Could not load #{name} adapter: #{e}" + end + false + end +end + +ENV['ADAPTER'] ||= 'sqlite3' + +HAS_SQLITE3 = setup_adapter(:sqlite3, 'sqlite3::memory:') +HAS_MYSQL = setup_adapter(:mysql, 'mysql://localhost/dm_core_test') +HAS_POSTGRES = setup_adapter(:postgres, 'postgres://postgres@localhost/dm_core_test') + +DataMapper::Logger.new(nil, :debug) + +# ---------------------------------------------------------------------- +# --- Do not declare new models unless absolutely necessary. Instead --- +# --- pick a metaphor and use those models. If you do need new --- +# --- models, define them according to the metaphor being used. 
--- +# ---------------------------------------------------------------------- + +Spec::Runner.configure do |config| + config.before(:each) do + # load_models_for_metaphor :vehicles + end +end + +# ---------------------------------------------------------------------- +# --- All these models are going to be removed. Don't use them!!! --- +# ---------------------------------------------------------------------- + +class Article + include DataMapper::Resource + + property :id, Serial + property :blog_id, Integer + property :created_at, DateTime + property :author, String + property :title, String +end + +class Comment + include DataMapper::Resource + + property :id, Serial # blah +end + +class NormalClass + # should not include DataMapper::Resource +end diff --git a/vendor/dm-core-0.9.6/spec/unit/adapters/abstract_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/unit/adapters/abstract_adapter_spec.rb new file mode 100644 index 0000000..e1ca01d --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/adapters/abstract_adapter_spec.rb @@ -0,0 +1,133 @@ +require 'monitor' +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'adapters', 'adapter_shared_spec')) + +describe DataMapper::Adapters::AbstractAdapter do + + before do + @adapter = DataMapper::Adapters::AbstractAdapter.new(:default, 'mock_uri_string') + end + + it_should_behave_like 'a DataMapper Adapter' + + describe "when handling transactions" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter) + end + it "should be able to push and pop transactions on the current stack" do + @adapter.current_transaction.should == nil + @adapter.within_transaction?.should == false + @adapter.push_transaction(@transaction) + @adapter.current_transaction.should == @transaction + @adapter.within_transaction?.should == true + @adapter.push_transaction(@transaction) + @adapter.current_transaction.should == @transaction + 
@adapter.within_transaction?.should == true + @adapter.pop_transaction + @adapter.current_transaction.should == @transaction + @adapter.within_transaction?.should == true + @adapter.pop_transaction + @adapter.current_transaction.should == nil + @adapter.within_transaction?.should == false + end + it "should let each Thread have its own transaction stack" do + lock = Monitor.new + transaction2 = DataMapper::Transaction.new(@adapter) + @adapter.within_transaction?.should == false + @adapter.current_transaction.should == nil + @adapter.push_transaction(transaction2) + @adapter.within_transaction?.should == true + @adapter.current_transaction.should == transaction2 + lock.synchronize do + Thread.new do + @adapter.within_transaction?.should == false + @adapter.current_transaction.should == nil + @adapter.push_transaction(@transaction) + @adapter.within_transaction?.should == true + @adapter.current_transaction.should == @transaction + lock.synchronize do + @adapter.within_transaction?.should == true + @adapter.current_transaction.should == @transaction + @adapter.pop_transaction + @adapter.within_transaction?.should == false + @adapter.current_transaction.should == nil + end + end + @adapter.within_transaction?.should == true + @adapter.current_transaction.should == transaction2 + @adapter.pop_transaction + @adapter.within_transaction?.should == false + @adapter.current_transaction.should == nil + end + end + end + + it "should raise NotImplementedError when #create is called" do + lambda { @adapter.create([ :resource ]) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #read_many is called" do + lambda { @adapter.read_many(:query) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #read_one is called" do + lambda { @adapter.read_one(:query) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #update is called" do + lambda { 
@adapter.update(:attributes, :query) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #delete is called" do + lambda { @adapter.delete(:query) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #upgrade_model_storage is called" do + lambda { @adapter.upgrade_model_storage(:repository, :resource) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #storage_exists? is called" do + lambda { @adapter.storage_exists?("hehu") }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #create_model_storage is called" do + lambda { @adapter.create_model_storage(:repository, :resource) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #destroy_model_storage is called" do + lambda { @adapter.destroy_model_storage(:repository, :resource) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #alter_model_storage is called" do + lambda { @adapter.alter_model_storage(:repository, :resource) }.should raise_error(NotImplementedError) + end + + it "should raise NotImplementedError when #create_property_storage is called" do + lambda { @adapter.create_property_storage(:repository, :property) } + end + + it "should raise NotImplementedError when #destroy_property_storage is called" do + lambda { @adapter.destroy_property_storage(:repository, :property) } + end + + it "should raise NotImplementedError when #alter_property_storage is called" do + lambda { @adapter.alter_property_storage(:repository, :property) } + end + + it "should raise NotImplementedError when #transaction_primitive is called" do + lambda { @adapter.transaction_primitive }.should raise_error(NotImplementedError) + end + + it "should clean out dead threads from @transactions" do + @adapter.instance_eval do @transactions end.size.should == 0 + t = Thread.new do + 
@adapter.push_transaction("plur") + end + while t.alive? + sleep 0.1 + end + @adapter.instance_eval do @transactions end.size.should == 1 + @adapter.push_transaction("ploj") + @adapter.instance_eval do @transactions end.size.should == 1 + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/adapters/adapter_shared_spec.rb b/vendor/dm-core-0.9.6/spec/unit/adapters/adapter_shared_spec.rb new file mode 100644 index 0000000..cb20282 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/adapters/adapter_shared_spec.rb @@ -0,0 +1,15 @@ + +describe "a DataMapper Adapter", :shared => true do + + it "should initialize the connection uri" do + new_adapter = @adapter.class.new(:default, Addressable::URI.parse('some://uri/string')) + new_adapter.instance_variable_get('@uri').to_s.should == Addressable::URI.parse('some://uri/string').to_s + end + + %w{create read_many read_one update delete create_model_storage alter_model_storage destroy_model_storage create_property_storage alter_property_storage destroy_property_storage} .each do |meth| + it "should have a #{meth} method" do + @adapter.should respond_to(meth.intern) + end + end + +end diff --git a/vendor/dm-core-0.9.6/spec/unit/adapters/data_objects_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/unit/adapters/data_objects_adapter_spec.rb new file mode 100644 index 0000000..4a15afd --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/adapters/data_objects_adapter_spec.rb @@ -0,0 +1,628 @@ +require 'monitor' +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +require DataMapper.root / 'spec' / 'unit' / 'adapters' / 'adapter_shared_spec' + +# TODO: make a shared adapter spec for all the DAO objects to adhere to + +describe DataMapper::Adapters::DataObjectsAdapter do + before :all do + class Cheese + include DataMapper::Resource + property :id, Serial + property :name, String, :nullable => false + property :color, String, :default => 'yellow' + property :notes, String, :length => 100, :lazy => true 
+ end + end + + before do + @uri = Addressable::URI.parse('mock://localhost') + @adapter = DataMapper::Adapters::DataObjectsAdapter.new(:default, @uri) + end + + it_should_behave_like 'a DataMapper Adapter' + + describe "#find_by_sql" do + + before do + class Plupp + include DataMapper::Resource + property :id, Integer, :key => true + property :name, String + end + end + + it "should be added to DataMapper::Model" do + DataMapper::Model.instance_methods.include?("find_by_sql").should == true + Plupp.should respond_to(:find_by_sql) + end + + describe "when called" do + + before do + @reader = mock("reader") + @reader.stub!(:next!).and_return(false) + @reader.stub!(:close) + @connection = mock("connection") + @connection.stub!(:close) + @command = mock("command") + @adapter = Plupp.repository.adapter + @repository = Plupp.repository + @repository.stub!(:adapter).and_return(@adapter) + @adapter.stub!(:create_connection).and_return(@connection) + @adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::DataObjectsAdapter).and_return(true) + end + + it "should accept a single String argument with or without options hash" do + @connection.should_receive(:create_command).twice.with("SELECT * FROM plupps").and_return(@command) + @command.should_receive(:execute_reader).twice.and_return(@reader) + Plupp.should_receive(:repository).any_number_of_times.and_return(@repository) + Plupp.should_receive(:repository).any_number_of_times.with(:plupp_repo).and_return(@repository) + Plupp.find_by_sql("SELECT * FROM plupps").to_a + Plupp.find_by_sql("SELECT * FROM plupps", :repository => :plupp_repo).to_a + end + + it "should accept an Array argument with or without options hash" do + @connection.should_receive(:create_command).twice.with("SELECT * FROM plupps WHERE plur = ?").and_return(@command) + @command.should_receive(:execute_reader).twice.with("my pretty plur").and_return(@reader) + 
Plupp.should_receive(:repository).any_number_of_times.and_return(@repository) + Plupp.should_receive(:repository).any_number_of_times.with(:plupp_repo).and_return(@repository) + Plupp.find_by_sql(["SELECT * FROM plupps WHERE plur = ?", "my pretty plur"]).to_a + Plupp.find_by_sql(["SELECT * FROM plupps WHERE plur = ?", "my pretty plur"], :repository => :plupp_repo).to_a + end + + it "should accept a Query argument with or without options hash" do + @connection.should_receive(:create_command).twice.with('SELECT "name" FROM "plupps" WHERE ("name" = ?) ORDER BY "id"').and_return(@command) + @command.should_receive(:execute_reader).twice.with('my pretty plur').and_return(@reader) + Plupp.should_receive(:repository).any_number_of_times.and_return(@repository) + Plupp.should_receive(:repository).any_number_of_times.with(:plupp_repo).and_return(@repository) + Plupp.find_by_sql(DataMapper::Query.new(@repository, Plupp, "name" => "my pretty plur", :fields => ["name"])).to_a + Plupp.find_by_sql(DataMapper::Query.new(@repository, Plupp, "name" => "my pretty plur", :fields => ["name"]), :repository => :plupp_repo).to_a + end + + it "requires a Repository that is a DataObjectsRepository to work" do + non_do_adapter = mock("non do adapter") + non_do_repo = mock("non do repo") + non_do_repo.stub!(:adapter).and_return(non_do_adapter) + Plupp.should_receive(:repository).any_number_of_times.with(:plupp_repo).and_return(non_do_repo) + Proc.new do + Plupp.find_by_sql(:repository => :plupp_repo) + end.should raise_error(Exception, /DataObjectsAdapter/) + end + + it "requires some kind of query to work at all" do + Plupp.should_receive(:repository).any_number_of_times.with(:plupp_repo).and_return(@repository) + Proc.new do + Plupp.find_by_sql(:repository => :plupp_repo) + end.should raise_error(Exception, /requires a query/) + end + + end + + end + + describe '#uri options' do + it 'should transform a fully specified option hash into a URI' do + options = { + :adapter => 'mysql', + :host 
=> 'davidleal.com', + :username => 'me', + :password => 'mypass', + :port => 5000, + :database => 'you_can_call_me_al', + :socket => 'nosock' + } + + adapter = DataMapper::Adapters::DataObjectsAdapter.new(:spec, options) + adapter.uri.should == + DataObjects::URI.parse("mysql://me:mypass@davidleal.com:5000/you_can_call_me_al?socket=nosock") + end + + it 'should transform a minimal options hash into a URI' do + options = { + :adapter => 'mysql', + :database => 'you_can_call_me_al' + } + + adapter = DataMapper::Adapters::DataObjectsAdapter.new(:spec, options) + adapter.uri.should == DataObjects::URI.parse("mysql:you_can_call_me_al") + end + + it 'should accept the uri when no overrides exist' do + uri = Addressable::URI.parse("protocol:///") + DataMapper::Adapters::DataObjectsAdapter.new(:spec, uri).uri.should == DataObjects::URI.parse(uri) + end + end + + describe '#create' do + before do + @result = mock('result', :to_i => 1, :insert_id => 1) + + @adapter.stub!(:execute).and_return(@result) + @adapter.stub!(:supports_returning?).and_return(false) + + @property = mock('property', :kind_of? => true, :serial? => true, :name => :property, :field => 'property', :custom? => false, :typecast => 'bind value') + @properties = [ @property ] + @bind_values = [ 'bind value' ] + @attributes = mock('attributes', :keys => @properties, :values => @bind_values) + @model = mock('model', :kind_of? 
=> true, :key => [ @property ], :storage_name => 'models') + @resource = mock('resource', :model => @model, :dirty_attributes => @attributes) + + @property.stub!(:set!).and_return(@resource) + + @statement = 'INSERT INTO "models" ("property") VALUES (?)' + end + + def do_create + @adapter.create([ @resource ]) + end + + it 'should use only dirty properties' do + @resource.should_receive(:dirty_attributes).with(no_args).and_return(@attributes) + do_create.should == 1 + end + + it 'should use the bind values' do + @attributes.should_receive(:values).with(no_args).and_return(@bind_values) + + @adapter.should_receive(:execute).with(@statement, *@bind_values).and_return(@result) + + do_create.should == 1 + end + + it 'should generate an SQL statement when supports_returning? is true' do + @property.should_receive(:serial?).with(no_args).and_return(true) + @adapter.should_receive(:supports_returning?).with(no_args).and_return(true) + + @statement = 'INSERT INTO "models" ("property") VALUES (?) RETURNING "property"' + @adapter.should_receive(:execute).with(@statement, 'bind value').and_return(@result) + + do_create.should == 1 + end + + it 'should generate an SQL statement when supports_default_values? is true' do + @bind_values.clear + @properties.clear + @adapter.should_receive(:supports_default_values?).with(no_args).and_return(true) + + @statement = 'INSERT INTO "models" DEFAULT VALUES' + @adapter.should_receive(:execute).with(@statement).and_return(@result) + + do_create.should == 1 + end + + it 'should generate an SQL statement when supports_default_values? 
is false' do + @bind_values.clear + @properties.clear + @adapter.should_receive(:supports_default_values?).with(no_args).and_return(false) + + @statement = 'INSERT INTO "models" () VALUES ()' + @adapter.should_receive(:execute).with(@statement).and_return(@result) + + do_create.should == 1 + end + + it 'should return 0 if no rows created' do + @result.should_receive(:to_i).with(no_args).and_return(0) + do_create.should == 0 + end + + it 'should return 1 if number of rows created is 1' do + @result.should_receive(:to_i).with(no_args).and_return(1) + do_create.should == 1 + end + + it 'should set the resource primary key if the model key size is 1 and the key is serial' do + @model.key.size.should == 1 + @property.should_receive(:serial?).and_return(true) + @result.should_receive(:insert_id).and_return(777) + @property.should_receive(:set!).with(@resource, 777) + do_create.should == 1 + end + end + + [ :read_many, :read_one ].each do |method| + describe "##{method}" do + before do + @key = mock('key') + @model = mock('model', :key => @key, :storage_name => 'models', :relationships => {}) + @primitive = mock('primitive') + @property = mock('property', :kind_of? => true, :model => @model, :field => 'property', :primitive => @primitive) + + @child_model = @model + @parent_model = mock('parent model', :storage_name => 'parents') + @parent_property = mock('parent id', :kind_of? => true, :model => @parent_model, :field => 'id') + + @child_key = [ @property ] + @parent_key = [ @parent_property ] + @relationship = mock('relationship', :child_model => @child_model, :parent_model => @parent_model, :child_key => @child_key, :parent_key => @parent_key) + @links = [ @relationship ] + + @fields = [ @property ] + @bind_values = [ 'bind value' ] + @conditions = [ [ :eql, @property, @bind_values[0] ] ] + + @direction = mock('direction', :property => @property, :direction => :desc) + @order = [ @direction ] + + @query = mock('query', :model => @model, :kind_of? 
=> true, :links => @links, :fields => @fields, :conditions => @conditions, :order => @order, :limit => 111, :offset => 222, :bind_values => @bind_values) + @query.should_receive(:unique?).with(no_args).and_return(false) + + @reader = mock('reader', :close => true, :next! => false) + @command = mock('command', :set_types => nil, :execute_reader => @reader) + @connection = mock('connection', :close => true, :create_command => @command) + + DataObjects::Connection.stub!(:new).and_return(@connection) + DataMapper::Query::Direction.stub!(:===).and_return(true) + end + + if method == :read_one + before do + @query.should_receive(:limit).with(no_args).twice.and_return(1) + + @values = @bind_values.dup + + @reader.should_receive(:next!).with(no_args).and_return(true) + @reader.should_receive(:values).with(no_args).and_return(@values) + + @resource = mock('resource') + @resource.should_receive(:kind_of?).with(DataMapper::Resource).any_number_of_times.and_return(true) + + @model.should_receive(:load).with(@values, @query).and_return(@resource) + + @statement = 'SELECT "models"."property" FROM "models" INNER JOIN "parents" ON ("parents"."id" = "models"."property") WHERE ("models"."property" = ?) ORDER BY "models"."property" DESC LIMIT 1 OFFSET 222' + end + + define_method(:do_read) do + resource = @adapter.read_one(@query) + resource.should == @resource + resource + end + elsif method == :read_many + before do + @statement = 'SELECT "models"."property" FROM "models" INNER JOIN "parents" ON ("parents"."id" = "models"."property") WHERE ("models"."property" = ?) 
ORDER BY "models"."property" DESC LIMIT 111 OFFSET 222' + end + + define_method(:do_read) do + collection = @adapter.read_many(@query) + collection.to_a + collection + end + end + + it 'should use the bind values' do + @command.should_receive(:execute_reader).with(*@bind_values).and_return(@reader) + do_read + end + + it 'should generate an SQL statement' do + @connection.should_receive(:create_command).with(@statement).and_return(@command) + do_read + end + + it 'should generate an SQL statement with composite keys' do + other_property = mock('other property', :kind_of? => true) + other_property.should_receive(:field).with(:default).and_return('other') + other_property.should_receive(:model).with(no_args).and_return(@model) + + other_value = 'other value' + @bind_values << other_value + @conditions << [ :eql, other_property, other_value ] + + @statement = %[SELECT "models"."property" FROM "models" INNER JOIN "parents" ON ("parents"."id" = "models"."property") WHERE ("models"."property" = ?) AND ("models"."other" = ?) ORDER BY "models"."property" DESC LIMIT #{method == :read_one ? 
'1' : '111'} OFFSET 222] + @query.should_receive(:conditions).with(no_args).twice.and_return(@conditions) + + @connection.should_receive(:create_command).with(@statement).and_return(@command) + + do_read + end + + it 'should set the return types to the property primitives' do + @command.should_receive(:set_types).with([ @primitive ]) + do_read + end + + it 'should close the reader' do + @reader.should_receive(:close).with(no_args) + do_read + end + + it 'should close the connection' do + @connection.should_receive(:close).with(no_args) + do_read + end + + if method == :read_one + it 'should return a DataMapper::Resource' do + do_read.should == be_kind_of(DataMapper::Resource) + end + else + it 'should return a DataMapper::Collection' do + do_read.should be_kind_of(DataMapper::Collection) + end + end + end + end + + describe '#update' do + before do + @result = mock('result', :to_i => 1) + + @adapter.stub!(:execute).and_return(@result) + + @values = %w[ new ] + @model = mock('model', :storage_name => 'models') + @property = mock('property', :kind_of? => true, :field => 'property') + @bind_values = [ 'bind value' ] + @conditions = [ [ :eql, @property, @bind_values[0] ] ] + @attributes = mock('attributes', :kind_of? => true, :empty? => false, :keys => [ @property ], :values => @values) + @query = mock('query', :kind_of? => true, :model => @model, :links => [], :conditions => @conditions, :bind_values => @bind_values) + @statement = 'UPDATE "models" SET "property" = ? WHERE ("property" = ?)' + end + + def do_update + @adapter.update(@attributes, @query) + end + + it 'should use the bind values' do + @attributes.should_receive(:values).with(no_args).and_return(@values) + @query.should_receive(:bind_values).with(no_args).and_return(@bind_values) + + @adapter.should_receive(:execute).with(@statement, *@values + @bind_values).and_return(@result) + + do_update.should == 1 + end + + it 'should generate an SQL statement' do + other_property = mock('other property', :kind_of? 
=> true) + other_property.should_receive(:field).with(:default).and_return('other') + other_property.should_receive(:model).with(no_args).and_return(@model) + + other_value = 'other value' + @bind_values << other_value + @conditions << [ :eql, other_property, other_value ] + + @query.should_receive(:conditions).with(no_args).twice.and_return(@conditions) + + @statement = 'UPDATE "models" SET "property" = ? WHERE ("property" = ?) AND ("other" = ?)' + @adapter.should_receive(:execute).with(@statement, *%w[ new ] + @bind_values).and_return(@result) + + do_update.should == 1 + end + + it 'should return 0 if no rows updated' do + @result.should_receive(:to_i).with(no_args).and_return(0) + do_update.should == 0 + end + + it 'should return 1 if number of rows updated is 1' do + @result.should_receive(:to_i).with(no_args).and_return(1) + do_update.should == 1 + end + end + + describe '#delete' do + before do + @result = mock('result', :to_i => 1) + + @adapter.stub!(:execute).and_return(@result) + + @model = mock('model', :storage_name => 'models') + @property = mock('property', :kind_of? => true, :field => 'property') + @bind_values = [ 'bind value' ] + @conditions = [ [ :eql, @property, @bind_values[0] ] ] + @query = mock('query', :kind_of? => true, :model => @model, :links => [], :conditions => @conditions, :bind_values => @bind_values) + @resource = mock('resource', :to_query => @query) + @statement = 'DELETE FROM "models" WHERE ("property" = ?)' + end + + def do_delete + @adapter.delete(@resource.to_query(@repository)) + end + + it 'should use the bind values' do + @query.should_receive(:bind_values).with(no_args).and_return(@bind_values) + + @adapter.should_receive(:execute).with(@statement, *@bind_values).and_return(@result) + + do_delete.should == 1 + end + + it 'should generate an SQL statement' do + other_property = mock('other property', :kind_of? 
=> true) + other_property.should_receive(:field).with(:default).and_return('other') + other_property.should_receive(:model).with(no_args).and_return(@model) + + other_value = 'other value' + @bind_values << other_value + @conditions << [ :eql, other_property, other_value ] + + @query.should_receive(:conditions).with(no_args).twice.and_return(@conditions) + + @statement = 'DELETE FROM "models" WHERE ("property" = ?) AND ("other" = ?)' + @adapter.should_receive(:execute).with(@statement, *@bind_values).and_return(@result) + + do_delete.should == 1 + end + + it 'should return 0 if no rows deleted' do + @result.should_receive(:to_i).with(no_args).and_return(0) + do_delete.should == 0 + end + + it 'should return 1 if number of rows deleted is 1' do + @result.should_receive(:to_i).with(no_args).and_return(1) + do_delete.should == 1 + end + end + + describe "when upgrading tables" do + it "should raise NotImplementedError when #storage_exists? is called" do + lambda { @adapter.storage_exists?("cheeses") }.should raise_error(NotImplementedError) + end + + describe "#upgrade_model_storage" do + it "should call #create_model_storage" do + @adapter.should_receive(:create_model_storage).with(repository, Cheese).and_return(true) + @adapter.upgrade_model_storage(repository, Cheese).should == Cheese.properties + end + + it "should check if all properties of the model have columns if the table exists" do + @adapter.should_receive(:field_exists?).with("cheeses", "id").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "name").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "color").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "notes").and_return(true) + @adapter.should_receive(:storage_exists?).with("cheeses").and_return(true) + @adapter.upgrade_model_storage(repository, Cheese).should == [] + end + + it "should create and execute add column statements for columns that dont exist" do + 
@adapter.should_receive(:field_exists?).with("cheeses", "id").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "name").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "color").and_return(true) + @adapter.should_receive(:field_exists?).with("cheeses", "notes").and_return(false) + @adapter.should_receive(:storage_exists?).with("cheeses").and_return(true) + connection = mock("connection") + connection.should_receive(:close) + @adapter.should_receive(:create_connection).and_return(connection) + statement = mock("statement") + command = mock("command") + result = mock("result") + command.should_receive(:execute_non_query).and_return(result) + connection.should_receive(:create_command).with(statement).and_return(command) + @adapter.should_receive(:alter_table_add_column_statement).with("cheeses", + { + :nullable? => true, + :name => "notes", + :serial? => false, + :primitive => "VARCHAR", + :size => 100 + }).and_return(statement) + @adapter.upgrade_model_storage(repository, Cheese).should == [Cheese.notes] + end + end + end + + describe '#execute' do + before do + @mock_command = mock('Command', :execute_non_query => nil) + @mock_db = mock('DB Connection', :create_command => @mock_command, :close => true) + + @adapter.stub!(:create_connection).and_return(@mock_db) + end + + it 'should #create_command from the sql passed' do + @mock_db.should_receive(:create_command).with('SQL STRING').and_return(@mock_command) + @adapter.execute('SQL STRING') + end + + it 'should pass any additional args to #execute_non_query' do + @mock_command.should_receive(:execute_non_query).with(:args) + @adapter.execute('SQL STRING', :args) + end + + it 'should return the result of #execute_non_query' do + @mock_command.should_receive(:execute_non_query).and_return(:result_set) + + @adapter.execute('SQL STRING').should == :result_set + end + + it 'should log any errors, then re-raise them' do + 
@mock_command.should_receive(:execute_non_query).and_raise("Oh Noes!") + DataMapper.logger.should_receive(:error) + + lambda { @adapter.execute('SQL STRING') }.should raise_error("Oh Noes!") + end + + it 'should always close the db connection' do + @mock_command.should_receive(:execute_non_query).and_raise("Oh Noes!") + @mock_db.should_receive(:close) + + lambda { @adapter.execute('SQL STRING') }.should raise_error("Oh Noes!") + end + end + + describe '#query' do + before do + @mock_reader = mock('Reader', :fields => ['id', 'UserName', 'AGE'], + :values => [1, 'rando', 27], + :close => true) + @mock_command = mock('Command', :execute_reader => @mock_reader) + @mock_db = mock('DB Connection', :create_command => @mock_command, :close => true) + + #make the while loop run exactly once + @mock_reader.stub!(:next!).and_return(true, nil) + @adapter.stub!(:create_connection).and_return(@mock_db) + end + + it 'should #create_command from the sql passed' do + @mock_db.should_receive(:create_command).with('SQL STRING').and_return(@mock_command) + @adapter.query('SQL STRING') + end + + it 'should pass any additional args to #execute_reader' do + @mock_command.should_receive(:execute_reader).with(:args).and_return(@mock_reader) + @adapter.query('SQL STRING', :args) + end + + describe 'returning multiple fields' do + + it 'should underscore the field names as members of the result struct' do + @mock_reader.should_receive(:fields).and_return(['id', 'UserName', 'AGE']) + + result = @adapter.query('SQL STRING') + + result.first.members.should == %w{id user_name age} + end + + it 'should convert each row into the struct' do + @mock_reader.should_receive(:values).and_return([1, 'rando', 27]) + + @adapter.query('SQL STRING') + end + + it 'should add the row structs into the results array' do + results = @adapter.query('SQL STRING') + + results.should be_kind_of(Array) + + row = results.first + row.should be_kind_of(Struct) + + row.id.should == 1 + row.user_name.should == 'rando' + 
row.age.should == 27 + end + + end + + describe 'returning a single field' do + + it 'should add the value to the results array' do + @mock_reader.should_receive(:fields).and_return(['username']) + @mock_reader.should_receive(:values).and_return(['rando']) + + results = @adapter.query('SQL STRING') + + results.should be_kind_of(Array) + results.first.should == 'rando' + end + + end + + it 'should log any errors, then re-raise them' do + @mock_command.should_receive(:execute_non_query).and_raise("Oh Noes!") + DataMapper.logger.should_receive(:error) + + lambda { @adapter.execute('SQL STRING') }.should raise_error("Oh Noes!") + end + + it 'should always close the db connection' do + @mock_command.should_receive(:execute_non_query).and_raise("Oh Noes!") + @mock_db.should_receive(:close) + + lambda { @adapter.execute('SQL STRING') }.should raise_error("Oh Noes!") + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/adapters/in_memory_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/unit/adapters/in_memory_adapter_spec.rb new file mode 100644 index 0000000..7961d88 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/adapters/in_memory_adapter_spec.rb @@ -0,0 +1,98 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', "..", 'spec_helper')) + +describe DataMapper::Adapters::InMemoryAdapter do + before do + DataMapper.setup(:inmem, :adapter => 'in_memory') + + class Heffalump + include DataMapper::Resource + + def self.default_repository_name + :inmem + end + + property :color, String, :key => true # TODO: Drop the 'must have a key' limitation + property :num_spots, Integer + property :striped, Boolean + end + + @heff1 = Heffalump.create(:color => 'Black', :num_spots => 0, :striped => true) + @heff2 = Heffalump.create(:color => 'Brown', :num_spots => 25, :striped => false) + @heff3 = Heffalump.create(:color => 'Blue', :num_spots => nil, :striped => false) + end + + it 'should successfully save an object' do + @heff1.new_record?.should be_false + end + + it 
'should be able to get the object' do + Heffalump.get('Black').should == @heff1 + end + + it 'should be able to get all the objects' do + Heffalump.all.should == [@heff1, @heff2, @heff3] + end + + it 'should be able to search for objects with equal value' do + Heffalump.all(:striped => true).should == [@heff1] + end + + it 'should be able to search for objects included in an array of values' do + Heffalump.all(:num_spots => [ 25, 50, 75, 100 ]).should == [@heff2] + end + + it 'should be able to search for objects included in a range of values' do + Heffalump.all(:num_spots => 25..100).should == [@heff2] + end + + it 'should be able to search for objects with nil value' do + Heffalump.all(:num_spots => nil).should == [@heff3] + end + + it 'should be able to search for objects with not equal value' do + Heffalump.all(:striped.not => true).should == [@heff2, @heff3] + end + + it 'should be able to search for objects not included in an array of values' do + Heffalump.all(:num_spots.not => [ 25, 50, 75, 100 ]).should == [@heff1, @heff3] + end + + it 'should be able to search for objects not included in a range of values' do + Heffalump.all(:num_spots.not => 25..100).should == [@heff1, @heff3] + end + + it 'should be able to search for objects with not nil value' do + Heffalump.all(:num_spots.not => nil).should == [@heff1, @heff2] + end + + it 'should be able to search for objects that match value' do + Heffalump.all(:color.like => 'Bl').should == [@heff1, @heff3] + end + + it 'should be able to search for objects with value greater than' do + Heffalump.all(:num_spots.gt => 0).should == [@heff2] + end + + it 'should be able to search for objects with value greater than or equal to' do + Heffalump.all(:num_spots.gte => 0).should == [@heff1, @heff2] + end + + it 'should be able to search for objects with value less than' do + Heffalump.all(:num_spots.lt => 1).should == [@heff1] + end + + it 'should be able to search for objects with value less than or equal to' do + 
Heffalump.all(:num_spots.lte => 0).should == [@heff1] + end + + it 'should be able to update an object' do + @heff1.num_spots = 10 + @heff1.save + Heffalump.get('Black').num_spots.should == 10 + end + + it 'should be able to destroy an object' do + @heff1.destroy + Heffalump.all.size.should == 2 + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/adapters/postgres_adapter_spec.rb b/vendor/dm-core-0.9.6/spec/unit/adapters/postgres_adapter_spec.rb new file mode 100644 index 0000000..29cae8a --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/adapters/postgres_adapter_spec.rb @@ -0,0 +1,133 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', "..", 'spec_helper')) + +if HAS_POSTGRES + describe DataMapper::Adapters::PostgresAdapter do + before :all do + @adapter = repository(:postgres).adapter + end + + describe '#upgrade_model_storage' do + before do + @repository = mock('repository', :kind_of? => true, :name => :postgres) + @model = mock('model', :kind_of? => true, :storage_name => 'models') + @property = mock('property', :kind_of? => true, :model => @model, :serial? 
=> true, :field => 'property') + + @model.should_receive(:properties).with(:postgres).any_number_of_times.and_return([@property]) + + @command = mock('command') + @connection = mock('connection', :create_command => @command, :close => true) + @result = mock('result', :to_i => 0) + + DataObjects::Connection.stub!(:new).and_return(@connection) + + @adapter.stub!(:execute).and_return(@result) + @adapter.stub!(:storage_exists?).and_return(true) + @adapter.stub!(:query).and_return([ 0 ]) + + @original_method = @adapter.class.superclass.instance_method(:upgrade_model_storage) + @adapter.class.superclass.send(:define_method, :upgrade_model_storage) {} + end + + after do + method = @original_method + @adapter.class.superclass.send(:define_method, :upgrade_model_storage) do |*args| + method.bind(self).call(*args) + end + end + + it 'should check to make sure the sequences exist' do + statement = %q[SELECT COUNT(*) FROM "pg_class" WHERE "relkind" = 'S' AND "relname" = ?] + @adapter.should_receive(:query).with(statement, 'models_property_seq').and_return([ 0 ]) + @adapter.upgrade_model_storage(@repository, @model) + end + + it 'should add sequences' do + statement = %q[CREATE SEQUENCE "models_property_seq"] + @adapter.should_receive(:execute).with(statement) + @adapter.upgrade_model_storage(@repository, @model) + end + + it 'should execute the superclass upgrade_model_storage' do + rv = mock('inside super') + @adapter.class.superclass.send(:define_method, :upgrade_model_storage) { rv } + @adapter.upgrade_model_storage(@repository, @model).should == rv + end + end + + describe '#create_model_storage' do + before do + @repository = mock('repository', :kind_of? => true, :name => :postgres) + @model = mock('model', :kind_of? => true, :storage_name => 'models') + @property = mock('property', :kind_of? => true, :model => @model, :serial? 
=> true, :field => 'property') + + @model.should_receive(:properties).with(:postgres).any_number_of_times.and_return([@property]) + + @adapter.stub!(:execute).and_return(@result) + @adapter.stub!(:storage_exists?).and_return(true) + @adapter.stub!(:query).and_return([ 0 ]) + + @original_method = @adapter.class.superclass.instance_method(:create_table_statement) + @adapter.class.superclass.send(:define_method, :create_table_statement) {} + end + + after do + method = @original_method + @adapter.class.superclass.send(:define_method, :create_table_statement) do |*args| + method.bind(self).call(*args) + end + end + + it 'should check to make sure the sequences exist' do + statement = %q[SELECT COUNT(*) FROM "pg_class" WHERE "relkind" = 'S' AND "relname" = ?] + @adapter.should_receive(:query).with(statement, 'models_property_seq').and_return([ 0 ]) + @adapter.create_model_storage(@repository, @model) + end + + it 'should add sequences' do + statement = %q[CREATE SEQUENCE "models_property_seq"] + @adapter.should_receive(:execute).with(statement) + @adapter.create_model_storage(@repository, @model) + end + + it 'should execute the superclass upgrade_model_storage' do + rv = mock('inside super') + @adapter.class.superclass.send(:define_method, :create_table_statement) { rv } + @adapter.create_table_statement(@repository, @model).should == rv + end + end + + describe '#destroy_model_storage' do + before do + @repository = mock('repository', :kind_of? => true, :name => :postgres) + @model = mock('model', :kind_of? => true, :storage_name => 'models') + @property = mock('property', :kind_of? => true, :model => @model, :serial? 
=> true, :field => 'property') + + @model.should_receive(:properties).with(:postgres).any_number_of_times.and_return([@property]) + + @original_method = @adapter.class.superclass.instance_method(:destroy_model_storage) + @adapter.class.superclass.send(:define_method, :destroy_model_storage) {} + end + + after do + method = @original_method + @adapter.class.superclass.send(:define_method, :destroy_model_storage) do |*args| + method.bind(self).call(*args) + end + end + + it 'should not execute the superclass destroy_model_storage if the storage does not exist' do + rv = mock('inside super') + @adapter.class.superclass.send(:define_method, :destroy_model_storage) { rv } + @adapter.destroy_model_storage(@repository, @model).should_not == rv + end + + it 'should execute the superclass destroy_model_storage if the storage exists' do + rv = mock('inside super') + @adapter.class.superclass.send(:define_method, :destroy_model_storage) { rv } + @adapter.stub!(:storage_exists?).and_return(true) + + @adapter.destroy_model_storage(@repository, @model).should == rv + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations/many_to_many_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations/many_to_many_spec.rb new file mode 100644 index 0000000..6a78def --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations/many_to_many_spec.rb @@ -0,0 +1,32 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +describe DataMapper::Associations::ManyToMany do + + load_models_for_metaphor :vehicles, :content + + it 'should allow a declaration' do + lambda do + class Supplier + has n, :manufacturers, :through => Resource + end + end.should_not raise_error + end + + it 'should handle models inside modules' do + lambda do + module Content + class Dialect + has n, :locales, :through => Resource, :class_name => "Language::Locale" + end + + class Locale + has n, :dialects, :through => Resource, :class_name => "Language::Dialect" + end 
+ end + end.should_not raise_error + end + +end + +describe DataMapper::Associations::ManyToMany::Proxy do +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations/many_to_one_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations/many_to_one_spec.rb new file mode 100644 index 0000000..9fe7320 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations/many_to_one_spec.rb @@ -0,0 +1,152 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +describe DataMapper::Associations::ManyToOne do + + load_models_for_metaphor :vehicles + + it 'should allow a declaration' do + lambda do + class Vehicle + belongs_to :manufacturer + end + end.should_not raise_error + end +end + +describe DataMapper::Associations::ManyToOne::Proxy do + + load_models_for_metaphor :vehicles + + before do + @child = mock('child', :kind_of? => true) + @parent = mock('parent') + @relationship = mock('relationship', :kind_of? => true, :get_parent => @parent, :attach_parent => nil) + @association = DataMapper::Associations::ManyToOne::Proxy.new(@relationship, @child) + + @association.replace(@parent) + end + + it 'should provide #replace' do + @association.should respond_to(:replace) + end + + describe '#replace' do + before do + @other = mock('other parent') + end + + before do + @relationship.should_receive(:attach_parent).with(@child, @other) + end + + it 'should remove the resource from the collection' do + @association.should == @parent + @association.replace(@other) + @association.should == @other + end + + it 'should not automatically save that the resource was removed from the association' do + @other.should_not_receive(:save) + @association.replace(@other) + end + + it 'should return the association' do + @association.replace(@other).object_id.should == @association.object_id + end + end + + it 'should provide #save' do + @association.should respond_to(:replace) + end + + describe '#save' do + describe 'when the parent is nil' do + before do + 
@parent.should_receive(:nil?).with(no_args).and_return(true) + end + + it 'should not save the parent' do + @association.save + end + + it 'should return false' do + @association.save.should == false + end + end + + describe 'when the parent is not a new record' do + before do + @parent.should_receive(:new_record?).with(no_args).and_return(false) + end + + it 'should not save the parent' do + @parent.should_not_receive(:save) + @association.save + end + + it 'should return true' do + @association.save.should == true + end + end + + describe 'when the parent is a new record' do + before do + @parent.should_receive(:new_record?).with(no_args).and_return(true) + end + + it 'should save the parent' do + @relationship.should_receive(:with_repository).and_yield(@repository) + @parent.should_receive(:save).with(no_args) + @association.save + end + + it 'should return the result of the save' do + child_key = mock("child_key") + child_key.should_receive(:set).and_return(true) + parent_key = mock("parent_key") + parent_key.should_receive(:get).and_return(1) + @relationship.should_receive(:with_repository).and_yield(@repository) + @relationship.should_receive(:child_key).and_return(child_key) + @relationship.should_receive(:parent_key).and_return(parent_key) + save_results = mock('save results') + @parent.should_receive(:save).with(no_args).and_return(save_results) + @association.save.object_id.should == save_results.object_id + end + end + end + + it 'should provide #reload' do + @association.should respond_to(:reload) + end + + describe '#reload' do + before(:each) do + @mock_parent = mock('#reload test parent') + @association.replace(@mock_parent) + end + + it 'should set the @parent ivar to nil' do + @association.__send__(:parent).should == @mock_parent # Sanity check. + + # We can't test the value of the instance variable since + # #instance_variable_get will be run on the @parent (thanks to + # Proxy#method_missing). 
Instead, test that Relationship#get_parent is + # run -- if @parent wasn't set to nil, this expectation should fail. + @relationship.should_receive(:get_parent).once.and_return(@mock_parent) + @association.reload + + # Trigger #get_parent on the relationship. + @association.__send__(:parent) + end + + it 'should not change the foreign key in the child' do + @relationship.should_not_receive(:attach_parent) + @association.reload + end + + it 'should return self' do + @association.reload.should be_kind_of(DataMapper::Associations::ManyToOne::Proxy) + @association.reload.object_id.should == @association.object_id + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations/one_to_many_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations/one_to_many_spec.rb new file mode 100644 index 0000000..e57605a --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations/one_to_many_spec.rb @@ -0,0 +1,393 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +describe DataMapper::Associations::OneToMany do + + load_models_for_metaphor :vehicles + + before do + @class = Class.new do + def self.name + 'User' + end + + include DataMapper::Resource + + property :user_id, Integer, :key => true + end + end + + it 'should provide #has' do + @class.should respond_to(:has) + end + + describe '#has' do + it 'should return a Relationship' do + @class.has(@class.n, :orders).should be_kind_of(DataMapper::Associations::Relationship) + end + + describe 'relationship' do + before do + @relationship = mock('relationship') + DataMapper::Associations::Relationship.stub!(:new).and_return(@relationship) + end + + it 'should receive the name' do + DataMapper::Associations::Relationship.should_receive(:new) do |name,_,_,_,_| + name.should == :orders + end + @class.has(@class.n, :orders) + end + + it 'should receive the repository name' do + DataMapper::Associations::Relationship.should_receive(:new) do |_,repository_name,_,_,_| + 
repository_name.should == :mock + end + repository(:mock) do + @class.has(@class.n, :orders) + end + end + + it 'should receive the child model name when passed in as class_name' do + DataMapper::Associations::Relationship.should_receive(:new) do |_,_,child_model_name,_,_| + child_model_name.should == 'Company::Order' + end + @class.has(@class.n, :orders, :class_name => 'Company::Order') + end + + it 'should receive the child model name when class_name not passed in' do + DataMapper::Associations::Relationship.should_receive(:new) do |_,_,child_model_name,_,_| + child_model_name.should == 'Order' + end + @class.has(@class.n, :orders) + end + + it 'should receive the parent model name' do + DataMapper::Associations::Relationship.should_receive(:new) do |_,_,_,parent_model_name,_| + parent_model_name.should == @class + end + @class.has(@class.n, :orders) + end + + it 'should receive the parent model name' do + options = { :min => 0, :max => 100 } + DataMapper::Associations::Relationship.should_receive(:new) do |_,_,_,parent_model_name,_| + options.object_id.should == options.object_id + end + @class.has(@class.n, :orders, options) + end + end + + it 'should add an accessor for the proxy' do + @class.new.should_not respond_to(:orders) + @class.has(@class.n, :orders) + @class.new.should respond_to(:orders) + end + + describe 'proxy accessor' do + before :all do + class User + include DataMapper::Resource + end + + class Order + include DataMapper::Resource + end + end + + it 'should return a OneToMany::Proxy' do + @class.has(@class.n, :orders) + @class.new.orders.should be_kind_of(DataMapper::Associations::OneToMany::Proxy) + end + end + end + + it 'should work with classes inside modules' +end + +describe DataMapper::Associations::OneToMany::Proxy do + before do + @parent = mock('parent', :new_record? => true, :kind_of? 
=> true) + @resource = mock('resource', :null_object => true) + @collection = [] + @parent_key = mock('parent key', :get => []) + @repository = mock('repository', :save => nil, :kind_of? => true) + @relationship = mock('relationship', :get_children => @collection, :query => {}, :kind_of? => true, :child_key => [], :parent_key => @parent_key) + @association = DataMapper::Associations::OneToMany::Proxy.new(@relationship, @parent) + end + + describe 'a method that relates the resource', :shared => true do + it 'should add the resource to the collection' do + @association.should_not include(@resource) + do_add.should == return_value + @association.should include(@resource) + end + + it 'should not automatically save that the resource was added to the association' do + @relationship.should_not_receive(:attach_parent) + do_add.should == return_value + end + + it 'should persist the addition after saving the association' do + @relationship.should_receive(:with_repository).with(@resource).and_yield(@repository) + do_add.should == return_value + @relationship.should_receive(:attach_parent).with(@resource, @parent) + @association.save + end + end + + describe 'a method that orphans the resource', :shared => true do + before do + @association << @resource + end + + it 'should remove the resource from the collection' do + @association.should include(@resource) + do_remove.should == return_value + @association.should_not include(@resource) + end + + it 'should not automatically save that the resource was removed from the association' do + @relationship.should_not_receive(:attach_parent) + do_remove.should == return_value + end + + it 'should persist the removal after saving the association' do + @relationship.should_receive(:with_repository).with(@resource).and_yield(@repository) + do_remove.should == return_value + @relationship.should_receive(:attach_parent).with(@resource, nil) + @association.save + end + end + + it 'should provide #<<' do + @association.should 
respond_to(:<<) + end + + describe '#<<' do + def do_add + @association << @resource + end + + def return_value + @association + end + + it_should_behave_like 'a method that relates the resource' + end + + it 'should provide #push' do + @association.should respond_to(:push) + end + + describe '#push' do + def do_add + @association.push(@resource) + end + + def return_value + @association + end + + it_should_behave_like 'a method that relates the resource' + end + + it 'should provide #unshift' do + @association.should respond_to(:unshift) + end + + describe '#unshift' do + def do_add + @association.unshift(@resource) + end + + def return_value + @association + end + + it_should_behave_like 'a method that relates the resource' + end + + it 'should provide #replace' do + @association.should respond_to(:replace) + end + + describe '#replace' do + before do + @children = [ + mock('child 1', :save => true), + mock('child 2', :save => true), + ] + @collection << @resource + @collection.stub!(:loaded?).and_return(true) + @relationship.stub!(:attach_parent) + end + + def do_replace + @association.replace(@children) + end + + def return_value + @association + end + + it 'should remove the resource from the collection' do + @association.should include(@resource) + do_replace.should == return_value + @association.should_not include(@resource) + end + + it 'should not automatically save that the resource was removed from the association' do + @relationship.should_not_receive(:attach_parent) + do_replace.should == return_value + end + + it 'should persist the removal after saving the association' do + do_replace.should == return_value + @relationship.should_receive(:with_repository).exactly(3).times.and_yield(@repository) + @relationship.should_receive(:attach_parent).with(@resource, nil) + @association.save + end + + it 'should not automatically save that the children were added to the association' do + @relationship.should_not_receive(:attach_parent) + do_replace.should == 
return_value + end + + it 'should persist the addition after saving the association' do + do_replace.should == return_value + @relationship.should_receive(:with_repository).exactly(3).times.and_yield(@repository) + @relationship.should_receive(:attach_parent).with(@children[0], @parent) + @relationship.should_receive(:attach_parent).with(@children[1], @parent) + @association.save + end + end + + it 'should provide #pop' do + @association.should respond_to(:pop) + end + + describe '#pop' do + def do_remove + @association.pop + end + + def return_value + @resource + end + + it_should_behave_like 'a method that orphans the resource' + end + + it 'should provide #shift' do + @association.should respond_to(:shift) + end + + describe '#shift' do + def do_remove + @association.shift + end + + def return_value + @resource + end + + it_should_behave_like 'a method that orphans the resource' + end + + it 'should provide #delete' do + @association.should respond_to(:delete) + end + + describe '#delete' do + def do_remove + @association.delete(@resource) + end + + def return_value + @resource + end + + it_should_behave_like 'a method that orphans the resource' + end + + it 'should provide #delete_at' do + @association.should respond_to(:delete_at) + end + + describe '#delete_at' do + def do_remove + @association.delete_at(0) + end + + def return_value + @resource + end + + it_should_behave_like 'a method that orphans the resource' + end + + it 'should provide #clear' do + @association.should respond_to(:clear) + end + + describe '#clear' do + def do_remove + @association.clear + end + + def return_value + @association + end + + it_should_behave_like 'a method that orphans the resource' + + it 'should empty the collection' do + @association << mock('other resource', :new_record? 
=> false) + @association.should have(2).entries + do_remove + @association.should be_empty + end + end + + it 'should provide #reload' do + @association.should respond_to(:reload) + end + + describe '#reload' do + before do + @children = [ mock('child 1', :save => true), mock('child 2', :save => true) ] + @relationship.stub!(:get_children).and_return(@children) + end + + it 'should set the @children ivar to nil' do + @association.__send__(:children).should == @children # Sanity check. + + # We can't test the value of the @children instance variable since + # #instance_variable_get will be run on @children (thanks to + # Proxy#method_missing). Instead, test that Relationship#get_children is + # run -- if @children wasn't set to nil, this expectation should fail. + @relationship.should_receive(:get_children).once.and_return(@children) + @association.reload + + # Trigger #get_children on the relationship. + @association.__send__(:children).should == @children + end + + it 'should return self' do + @association.reload.should be_kind_of(DataMapper::Associations::OneToMany::Proxy) + @association.reload.object_id.should == @association.object_id + end + end + + describe 'when deleting the parent' do + it 'should delete all the children without calling destroy if relationship :dependent is :delete_all' + + it 'should destroy all the children if relationship :dependent is :destroy' + + it 'should set the parent key for each child to nil if relationship :dependent is :nullify' + + it 'should restrict the parent from being deleted if a child remains if relationship :dependent is restrict' + + it 'should be restrict by default if relationship :dependent is not specified' + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations/one_to_one_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations/one_to_one_spec.rb new file mode 100644 index 0000000..f48c1d5 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations/one_to_one_spec.rb @@ -0,0 +1,7 @@ +require 
File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +describe "DataMapper::Associations::OneToOne" do + + it "should allow a declaration" do + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations/relationship_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations/relationship_spec.rb new file mode 100644 index 0000000..f772dcf --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations/relationship_spec.rb @@ -0,0 +1,71 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')) + +describe DataMapper::Associations::Relationship do + + load_models_for_metaphor :vehicles + + it "should describe an association" do + belongs_to = DataMapper::Associations::Relationship.new( + :manufacturer, + :mock, + 'Vehicle', + 'Manufacturer', + { :child_key => [ :manufacturer_id ] } + ) + + belongs_to.should respond_to(:name) + belongs_to.should respond_to(:with_repository) + belongs_to.should respond_to(:child_key) + belongs_to.should respond_to(:parent_key) + end + + it "should map properties explicitly when an association method passes them in its options" do + belongs_to = DataMapper::Associations::Relationship.new( + :manufacturer, + :mock, + 'Vehicle', + 'Manufacturer', + { :child_key => [ :manufacturer_id ], :parent_key => [ :id ] } + ) + + belongs_to.name.should == :manufacturer + belongs_to.with_repository do |r| + r.name.should == :mock + end + + belongs_to.child_key.should be_a_kind_of(DataMapper::PropertySet) + belongs_to.parent_key.should be_a_kind_of(DataMapper::PropertySet) + + belongs_to.child_key.to_a.should == Vehicle.properties(:mock).slice(:manufacturer_id) + belongs_to.parent_key.to_a.should == Manufacturer.properties(:mock).key + end + + it "should infer properties when options aren't passed" do + has_many = DataMapper::Associations::Relationship.new( + :models, + :mock, + 'Vehicle', + 'Manufacturer', + { :child_key => [:model_id] } + ) + + has_many.name.should == :models + 
has_many.with_repository do |r| + r.name.should == :mock + end + + has_many.child_key.should be_a_kind_of(DataMapper::PropertySet) + has_many.parent_key.should be_a_kind_of(DataMapper::PropertySet) + # Vehicle.has n, :models, :class_name => 'Manufacturer', :child_key => "models_id" + has_many.child_key.to_a.should == Vehicle.properties(:mock).slice(:model_id) + has_many.parent_key.to_a.should == Manufacturer.properties(:mock).key + end + + it "should generate child properties with a safe subset of the parent options" do + pending + # For example, :size would be an option you'd want a generated child Property to copy, + # but :serial or :key obviously not. So need to take a good look at Property::OPTIONS to + # see what applies and what doesn't. + end + +end diff --git a/vendor/dm-core-0.9.6/spec/unit/associations_spec.rb b/vendor/dm-core-0.9.6/spec/unit/associations_spec.rb new file mode 100644 index 0000000..851b6e0 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/associations_spec.rb @@ -0,0 +1,242 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "DataMapper::Associations" do + + load_models_for_metaphor :vehicles + + before do + @relationship = mock(DataMapper::Associations::Relationship) + @n = 1.0/0 + + Manufacturer.mock_relationship = Vehicle.mock_relationship = @relationship + end + + describe "#many_to_one_relationships" do + before :all do + module MTORelationships + class A + include DataMapper::Resource + def self.default_repository_name + :a_db + end + repository(:b_db) do + belongs_to :b + end + repository(:c_db) do + belongs_to :c + end + end + class B + include DataMapper::Resource + def self.default_repository_name + :b_db + end + end + class C + include DataMapper::Resource + def self.default_repository_name + :c_db + end + end + end + end + it "should list all relationships that are one-to-many" do + MTORelationships::A.many_to_one_relationships.sort_by { |r| r.name.to_s }.should == 
[MTORelationships::A.relationships(:b_db)[:b], MTORelationships::A.relationships(:c_db)[:c]] + end + end + + describe ".relationships" do + class B + include DataMapper::Resource + end + + class C + include DataMapper::Resource + + repository(:mock) do + has 1, :b + end + end + + class D + include DataMapper::Resource + has 1, :b + end + + class E < D + end + + class F < D + has 1, :a + end + + it "should assume the default repository when no arguments are passed" do + lambda do + C.relationships + end.should_not raise_error + end + + it "should return the right set of relationships given the repository name" do + C.relationships.should be_empty + C.relationships(:mock).should_not be_empty + end + + it "should return the right set of relationships given the inheritance" do + E.relationships.should have(1).entries + D.relationships.should have(1).entries + F.relationships.should have(2).entries + end + end + + describe ".has" do + + it "should allow a declaration" do + lambda do + class Manufacturer + has 1, :halo_car + end + end.should_not raise_error + end + + it "should not allow a constraint that is not an Integer, Range or Infinity" do + lambda do + class Manufacturer + has '1', :halo_car + end + end.should raise_error(ArgumentError) + end + + it "should not allow a constraint where the min is larger than the max" do + lambda do + class Manufacturer + has 1..0, :halo_car + end + end.should raise_error(ArgumentError) + end + + it "should not allow overwriting of the auto assigned min/max values with keys" do + DataMapper::Associations::OneToMany.should_receive(:setup). + with(:vehicles, Manufacturer, { :min => 1, :max => 2 }). + and_return(@relationship) + + class Manufacturer + has(1..2, :vehicles, :min => 5, :max => 10).should == mock_relationship + end + end + + describe "one-to-one syntax" do + it "should create a basic one-to-one association with fixed constraint" do + DataMapper::Associations::OneToOne.should_receive(:setup). 
+ with(:halo_car, Manufacturer, { :min => 1, :max => 1 }). + and_return(@relationship) + + class Manufacturer + has(1, :halo_car).should == mock_relationship + end + end + + it "should create a basic one-to-one association with min/max constraints" do + DataMapper::Associations::OneToOne.should_receive(:setup). + with(:halo_car, Manufacturer, { :min => 0, :max => 1 }). + and_return(@relationship) + + class Manufacturer + has(0..1, :halo_car).should == mock_relationship + end + end + + it "should create a one-to-one association with options" do + DataMapper::Associations::OneToOne.should_receive(:setup). + with(:halo_car, Manufacturer, { :min => 1, :max => 1, :class_name => 'Car' }). + and_return(@relationship) + + class Manufacturer + has(1, :halo_car, :class_name => 'Car').should == mock_relationship + end + end + end + + describe "one-to-many syntax" do + it "should create a basic one-to-many association with no constraints" do + DataMapper::Associations::OneToMany.should_receive(:setup). + with(:vehicles, Manufacturer, { :min => 0, :max => @n }). + and_return(@relationship) + + class Manufacturer + has(n, :vehicles).should == mock_relationship + end + end + + it "should create a one-to-many association with fixed constraint" do + DataMapper::Associations::OneToMany.should_receive(:setup). + with(:vehicles, Manufacturer, { :min => 4, :max => 4 }). + and_return(@relationship) + + class Manufacturer + has(4, :vehicles).should == mock_relationship + end + end + + it "should create a one-to-many association with min/max constraints" do + DataMapper::Associations::OneToMany.should_receive(:setup). + with(:vehicles, Manufacturer, { :min => 2, :max => 4 }). + and_return(@relationship) + + class Manufacturer + has(2..4, :vehicles).should == mock_relationship + end + end + + it "should create a one-to-many association with options" do + DataMapper::Associations::OneToMany.should_receive(:setup). 
+ with(:vehicles, Manufacturer, { :min => 1, :max => @n, :class_name => 'Car' }). + and_return(@relationship) + + class Manufacturer + has(1..n, :vehicles, :class_name => 'Car').should == mock_relationship + end + end + + # do not remove or change this spec. + it "should raise an exception when n..n is used for the cardinality" do + lambda do + class Manufacturer + has n..n, :subsidiaries, :class_name => 'Manufacturer' + end + end.should raise_error(ArgumentError) + end + + it "should create one-to-many association and pass the :through option if specified" do + DataMapper::Associations::OneToMany.should_receive(:setup). + with(:suppliers, Vehicle, { :min => 0, :max => @n, :through => :manufacturers }). + and_return(@relationship) + + class Vehicle + has(n, :suppliers, :through => :manufacturers).should == mock_relationship + end + end + end + end + + describe ".belongs_to" do + it "should create a basic many-to-one association" do + DataMapper::Associations::ManyToOne.should_receive(:setup). + with(:vehicle, Manufacturer, {}). + and_return(@relationship) + + class Manufacturer + belongs_to(:vehicle).should == mock_relationship + end + end + + it "should create a many-to-one association with options" do + DataMapper::Associations::ManyToOne.should_receive(:setup). + with(:vehicle, Manufacturer, { :class_name => 'Car' }). 
+ and_return(@relationship) + + class Manufacturer + belongs_to(:vehicle, :class_name => 'Car').should == mock_relationship + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/auto_migrations_spec.rb b/vendor/dm-core-0.9.6/spec/unit/auto_migrations_spec.rb new file mode 100644 index 0000000..38af09b --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/auto_migrations_spec.rb @@ -0,0 +1,111 @@ +require 'pathname' +require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper' + +require DataMapper.root / 'lib' / 'dm-core' / 'repository' +require DataMapper.root / 'lib' / 'dm-core' / 'resource' +require DataMapper.root / 'lib' / 'dm-core' / 'auto_migrations' + +describe DataMapper::AutoMigrations do + + before :all do + @cow = Class.new do + include DataMapper::Resource + + property :name, String, :key => true + property :age, Integer + end + end + + before(:each) do + DataMapper::Resource.descendants.clear + end + + after(:each) do + DataMapper::Resource.descendants.clear + end + + it "should add the resource class to AutoMigrator's models on a mixin" do + @class = Class.new do + include DataMapper::Resource + end + + DataMapper::Resource.descendants.should include(@class) + end + + it "should add the #auto_migrate! method on a mixin" do + @cat = Class.new do + include DataMapper::Resource + + property :name, String, :key => true + property :age, Integer + end + + @cat.should respond_to(:auto_migrate!) + end + + it "should add the #auto_upgrade! method on a mixin" do + @cat = Class.new do + include DataMapper::Resource + + property :name, String, :key => true + property :age, Integer + end + + @cat.should respond_to(:auto_upgrade!) 
+ end + + it "should not conflict with other Migrators on a mixin" do + migrator_class = Class.new(DataMapper::Migrator) + + included_proc = lambda { |model| migrator_class.models << model } + + migrator_mixin = Module.new do + self.class.send(:define_method, :included, &included_proc) + end + + model_class = Class.new do + include DataMapper::Resource + include migrator_mixin + + property :name, String + property :age, String + end + + DataMapper::Resource.descendants.should include(model_class) + migrator_class.models.should include(model_class) + end + + describe "#auto_migrate" do + before do + @repository_name = mock('repository name') + end + + it "should call each model's auto_migrate! method" do + models = [:cat, :dog, :fish, :cow].map {|m| mock(m)} + + models.each do |model| + DataMapper::Resource.descendants << model + model.should_receive(:auto_migrate_down!).with(@repository_name) + model.should_receive(:auto_migrate_up!).with(@repository_name) + end + + DataMapper::AutoMigrator.auto_migrate(@repository_name) + end + end + describe "#auto_upgrade" do + before do + @repository_name = mock('repository name') + end + + it "should call each model's auto_upgrade! 
method" do + models = [:cat, :dog, :fish, :cow].map {|m| mock(m)} + + models.each do |model| + DataMapper::Resource.descendants << model + model.should_receive(:auto_upgrade!).with(@repository_name) + end + + DataMapper::AutoMigrator.auto_upgrade(@repository_name) + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/collection_spec.rb b/vendor/dm-core-0.9.6/spec/unit/collection_spec.rb new file mode 100644 index 0000000..3d47c62 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/collection_spec.rb @@ -0,0 +1,182 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +# ensure the Collection is extremely similar to an Array +# since it will be returned by Respository#all to return +# multiple resources to the caller +describe DataMapper::Collection do + before do + @property = mock('property') + @model = mock('model', :inheritance_property => [ @property ], :key => [ @property ]) + @query = mock('query', :kind_of? => true, :fields => [ @property ], :model => @model) + + @collection = DataMapper::Collection.new(@query) {} + end + + it 'should provide #<<' do + @collection.should respond_to(:<<) + end + + it 'should provide #all' do + @collection.should respond_to(:all) + end + + it 'should provide #at' do + @collection.should respond_to(:at) + end + + it 'should provide #build' do + @collection.should respond_to(:build) + end + + it 'should provide #clear' do + @collection.should respond_to(:clear) + end + + it 'should provide #collect!' do + @collection.should respond_to(:collect!) + end + + it 'should provide #concat' do + @collection.should respond_to(:concat) + end + + it 'should provide #create' do + @collection.should respond_to(:create) + end + + it 'should provide #delete' do + @collection.should respond_to(:delete) + end + + it 'should provide #delete_at' do + @collection.should respond_to(:delete_at) + end + + it 'should provide #destroy!' do + @collection.should respond_to(:destroy!) 
+ end + + it 'should provide #each' do + @collection.should respond_to(:each) + end + + it 'should provide #each_index' do + @collection.should respond_to(:each_index) + end + + it 'should provide #eql?' do + @collection.should respond_to(:eql?) + end + + it 'should provide #fetch' do + @collection.should respond_to(:fetch) + end + + it 'should provide #first' do + @collection.should respond_to(:first) + end + + it 'should provide #freeze' do + @collection.should respond_to(:freeze) + end + + it 'should provide #get' do + @collection.should respond_to(:get) + end + + it 'should provide #get!' do + @collection.should respond_to(:get!) + end + + it 'should provide #insert' do + @collection.should respond_to(:insert) + end + + it 'should provide #last' do + @collection.should respond_to(:last) + end + + it 'should provide #load' do + @collection.should respond_to(:load) + end + + it 'should provide #loaded?' do + @collection.should respond_to(:loaded?) + end + + it 'should provide #pop' do + @collection.should respond_to(:pop) + end + + it 'should provide #push' do + @collection.should respond_to(:push) + end + + it 'should provide #properties' do + @collection.should respond_to(:properties) + end + + it 'should provide #reject' do + @collection.should respond_to(:reject) + end + + it 'should provide #reject!' do + @collection.should respond_to(:reject!) + end + + it 'should provide #relationships' do + @collection.should respond_to(:relationships) + end + + it 'should provide #reload' do + @collection.should respond_to(:reload) + end + + it 'should provide #reverse' do + @collection.should respond_to(:reverse) + end + + it 'should provide #reverse!' do + @collection.should respond_to(:reverse!) 
+ end + + it 'should provide #reverse_each' do + @collection.should respond_to(:reverse_each) + end + + it 'should provide #select' do + @collection.should respond_to(:select) + end + + it 'should provide #shift' do + @collection.should respond_to(:shift) + end + + it 'should provide #slice' do + @collection.should respond_to(:slice) + end + + it 'should provide #slice!' do + @collection.should respond_to(:slice!) + end + + it 'should provide #sort' do + @collection.should respond_to(:sort) + end + + it 'should provide #sort!' do + @collection.should respond_to(:sort!) + end + + it 'should provide #unshift' do + @collection.should respond_to(:unshift) + end + + it 'should provide #update!' do + @collection.should respond_to(:update!) + end + + it 'should provide #values_at' do + @collection.should respond_to(:values_at) + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/data_mapper_spec.rb b/vendor/dm-core-0.9.6/spec/unit/data_mapper_spec.rb new file mode 100644 index 0000000..b1dad7d --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/data_mapper_spec.rb @@ -0,0 +1,35 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper do + describe ".dependency_queue" do + before(:all) do + @q = DataMapper.dependency_queue + end + + it "should return a dependency queue" do + @q.should be_a_kind_of(DataMapper::DependencyQueue) + end + + it "should only create one dependency queue" do + @q.should == DataMapper.dependency_queue + end + end + + describe ".prepare" do + it "should pass the default repository to the block if no argument is given" do + DataMapper.should_receive(:repository).with(no_args).and_return :default_repo + + DataMapper.prepare do |r| + r.should == :default_repo + end + end + + it "should allow custom type maps to be defined inside the prepare block" do + lambda { + DataMapper.prepare do |r| + r.map(String).to(:VARCHAR).with(:size => 1000) + end + }.should_not raise_error + end + end +end diff --git 
a/vendor/dm-core-0.9.6/spec/unit/identity_map_spec.rb b/vendor/dm-core-0.9.6/spec/unit/identity_map_spec.rb new file mode 100644 index 0000000..2d451cc --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/identity_map_spec.rb @@ -0,0 +1,126 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "DataMapper::IdentityMap" do + before(:all) do + class Cow + include DataMapper::Resource + property :id, Integer, :key => true + property :name, String + end + + class Chicken + include DataMapper::Resource + property :name, String + end + + class Pig + include DataMapper::Resource + property :id, Integer, :key => true + property :composite, Integer, :key => true + property :name, String + end + end + + it "should use a second level cache if created with on" + + it "should return nil on #get when it does not find the requested instance" do + map = DataMapper::IdentityMap.new + map.get([23]).should be_nil + end + + it "should return an instance on #get when it finds the requested instance" do + betsy = Cow.new({:id=>23,:name=>'Betsy'}) + map = DataMapper::IdentityMap.new + map.set(betsy.key, betsy) + map.get([23]).should == betsy + end + + it "should store an instance on #set" do + betsy = Cow.new({:id=>23,:name=>'Betsy'}) + map = DataMapper::IdentityMap.new + map.set(betsy.key, betsy) + map.get([23]).should == betsy + end + + it "should store instances with composite keys on #set" do + pig = Pig.new({:id=>1,:composite=>1,:name=> 'Pig'}) + piggy = Pig.new({:id=>1,:composite=>2,:name=>'Piggy'}) + + map = DataMapper::IdentityMap.new + map.set(pig.key, pig) + map.set(piggy.key, piggy) + + map.get([1,1]).should == pig + map.get([1,2]).should == piggy + end + + it "should remove an instance on #delete" do + betsy = Cow.new({:id=>23,:name=>'Betsy'}) + map = DataMapper::IdentityMap.new + map.set(betsy.key, betsy) + map.delete([23]) + map.get([23]).should be_nil + end +end + +describe "Second Level Caching" do + + before :all do + @mock_class 
= Class.new do + def get(key); raise NotImplementedError end + def set(key, instance); raise NotImplementedError end + def delete(key); raise NotImplementedError end + end + end + + it 'should expose a standard API' do + cache = @mock_class.new + cache.should respond_to(:get) + cache.should respond_to(:set) + cache.should respond_to(:delete) + end + + it 'should provide values when the first level cache entry is empty' do + cache = @mock_class.new + key = %w[ test ] + + cache.should_receive(:get).with(key).and_return('resource') + + map = DataMapper::IdentityMap.new(cache) + map.get(key).should == 'resource' + end + + it 'should be set when the first level cache entry is set' do + cache = @mock_class.new + betsy = Cow.new(:id => 23, :name => 'Betsy') + + cache.should_receive(:set).with(betsy.key, betsy).and_return(betsy) + + map = DataMapper::IdentityMap.new(cache) + map.set(betsy.key, betsy).should == betsy + end + + it 'should be deleted when the first level cache entry is deleted' do + cache = @mock_class.new + betsy = Cow.new(:id => 23, :name => 'Betsy') + + cache.stub!(:set) + cache.should_receive(:delete).with(betsy.key).and_return(betsy) + + map = DataMapper::IdentityMap.new(cache) + map.set(betsy.key, betsy).should == betsy + map.delete(betsy.key).should == betsy + end + + it 'should not provide values when the first level cache entry is full' do + cache = @mock_class.new + betsy = Cow.new(:id => 23, :name => 'Betsy') + + cache.stub!(:set) + cache.should_not_receive(:get) + + map = DataMapper::IdentityMap.new(cache) + map.set(betsy.key, betsy).should == betsy + map.get(betsy.key).should == betsy + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/is_spec.rb b/vendor/dm-core-0.9.6/spec/unit/is_spec.rb new file mode 100644 index 0000000..dfe84d1 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/is_spec.rb @@ -0,0 +1,80 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "DataMapper::Is" do + describe ".is" do + 
+ module DataMapper + + module Is + module Example + + def is_example(*args) + @args = args + + extend DataMapper::Is::Example::ClassMethods + end + + def is_example_args + @args + end + + module ClassMethods + def example_class_method + + end + end + + end + end + + module Model + include DataMapper::Is::Example + end # module Model + end # module DataMapper + + class House + include DataMapper::Resource + end + + class Cabin + include DataMapper::Resource + end + + it "should raise error unless it finds the plugin" do + lambda do + class House + is :no_plugin_by_this_name + end + end.should raise_error(DataMapper::PluginNotFoundError) + end + + it "should call plugin is_* method" do + lambda do + class House + is :example + end + end.should_not raise_error + end + + it "should pass through arguments to plugin is_* method" do + class House + is :example ,:option1 => :ping, :option2 => :pong + end + + House.is_example_args.length.should == 1 + House.is_example_args.first[:option2].should == :pong + end + + it "should not add class_methods before the plugin is activated" do + Cabin.respond_to?(:example_class_method).should be_false + + class Cabin + is :example + end + + Cabin.respond_to?(:example_class_method).should be_true + + end + + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/migrator_spec.rb b/vendor/dm-core-0.9.6/spec/unit/migrator_spec.rb new file mode 100644 index 0000000..8dff7c9 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/migrator_spec.rb @@ -0,0 +1,33 @@ +require 'pathname' +require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper' + +describe DataMapper::Migrator do + before(:each) do + DataMapper::Migrator.subclasses.clear + end + + after(:each) do + DataMapper::Migrator.subclasses.clear + end + + it "should keep track of subclasses" do + lambda { Class.new(DataMapper::Migrator) }.should change{ DataMapper::Migrator.subclasses.size }.by(1) + end + + it "should define a class level 'models' method for each subclass" do + 
klass = Class.new(DataMapper::Migrator) + + klass.should respond_to(:models) + end + + it "should keep subclass models seperated" do + klass_a = Class.new(DataMapper::Migrator) + klass_b = Class.new(DataMapper::Migrator) + + klass_a.models << :foo + + klass_b.models.should be_empty + + klass_a.models.should == [:foo] + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/model_spec.rb b/vendor/dm-core-0.9.6/spec/unit/model_spec.rb new file mode 100644 index 0000000..1a5251e --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/model_spec.rb @@ -0,0 +1,323 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe 'DataMapper::Model' do + module ModelSpec + class Resource + include DataMapper::Resource + + storage_names[:legacy] = 'legacy_resource' + + property :id, Serial + property :name, String + property :type, Discriminator + end + end + + it 'should provide .new' do + meta_class = class << ModelSpec::Resource; self end + meta_class.should respond_to(:new) + end + + describe '.new' do + it 'should require a default storage name and accept a block' do + pluto = DataMapper::Model.new('planets') do + property :name, String, :key => true + end + + pluto.storage_name(:default).should == 'planets' + pluto.storage_name(:legacy).should == 'planets' + pluto.properties[:name].should_not be_nil + end + end + + it 'should provide #transaction' do + ModelSpec::Resource.should respond_to(:transaction) + end + + describe '#transaction' do + it 'should return a new Transaction with Model as argument' do + transaction = mock("transaction") + DataMapper::Transaction.should_receive(:new).with(ModelSpec::Resource).and_return(transaction) + ModelSpec::Resource.transaction.should == transaction + end + end + + it 'should provide #before' do + ModelSpec::Resource.should respond_to(:before) + end + + it 'should provide #after' do + ModelSpec::Resource.should respond_to(:after) + end + + it 'should provide #repository' do + ModelSpec::Resource.should 
respond_to(:repository) + end + + describe '#repository' do + it 'should delegate to DataMapper.repository' do + repository = mock('repository') + DataMapper.should_receive(:repository).with(:legacy).and_return(repository) + ModelSpec::Resource.repository(:legacy).should == repository + end + + it 'should use default repository when not passed any arguments' do + ModelSpec::Resource.repository.name.should == ModelSpec::Resource.repository(:default).name + end + end + + it 'should provide #storage_name' do + ModelSpec::Resource.should respond_to(:storage_name) + end + + describe '#storage_name' do + it 'should map a repository to the storage location' do + ModelSpec::Resource.storage_name(:legacy).should == 'legacy_resource' + end + + it 'should use default repository when not passed any arguments' do + ModelSpec::Resource.storage_name.object_id.should == ModelSpec::Resource.storage_name(:default).object_id + end + end + + it 'should provide #storage_names' do + ModelSpec::Resource.should respond_to(:storage_names) + end + + describe '#storage_names' do + it 'should return a Hash mapping each repository to a storage location' do + ModelSpec::Resource.storage_names.should be_kind_of(Hash) + ModelSpec::Resource.storage_names.should == { :default => 'model_spec_resources', :legacy => 'legacy_resource' } + end + end + + it 'should provide #property' do + ModelSpec::Resource.should respond_to(:property) + end + + describe '#property' do + it 'should raise a SyntaxError when the name contains invalid characters' do + lambda { + ModelSpec::Resource.property(:"with space", TrueClass) + }.should raise_error(SyntaxError) + end + end + + it 'should provide #properties' do + ModelSpec::Resource.should respond_to(:properties) + end + + describe '#properties' do + it 'should return an PropertySet' do + ModelSpec::Resource.properties(:legacy).should be_kind_of(DataMapper::PropertySet) + ModelSpec::Resource.properties(:legacy).should have(3).entries + end + + it 'should use default 
repository when not passed any arguments' do + ModelSpec::Resource.properties.object_id.should == ModelSpec::Resource.properties(:default).object_id + end + end + + it 'should provide #key' do + ModelSpec::Resource.should respond_to(:key) + end + + describe '#key' do + it 'should return an Array of Property objects' do + ModelSpec::Resource.key(:legacy).should be_kind_of(Array) + ModelSpec::Resource.key(:legacy).should have(1).entries + ModelSpec::Resource.key(:legacy).first.should be_kind_of(DataMapper::Property) + end + + it 'should use default repository when not passed any arguments' do + ModelSpec::Resource.key.should == ModelSpec::Resource.key(:default) + end + + it 'should not cache the key value' do + class GasGiant < ModelSpec::Resource + end + + GasGiant.key.object_id.should_not == ModelSpec::Resource.key(:default) + + # change the key and make sure the Array changes + GasGiant.key == GasGiant.properties.slice(:id) + GasGiant.property(:new_prop, String, :key => true) + GasGiant.key.object_id.should_not == ModelSpec::Resource.key(:default) + GasGiant.key == GasGiant.properties.slice(:id, :new_prop) + end + end + + it 'should provide #get' do + ModelSpec::Resource.should respond_to(:get) + end + + it 'should provide #first' do + ModelSpec::Resource.should respond_to(:first) + end + + it 'should provide #all' do + ModelSpec::Resource.should respond_to(:all) + end + + it 'should provide #storage_exists?' do + ModelSpec::Resource.should respond_to(:storage_exists?) + end + + describe '#storage_exists?' 
do + it 'should return whether or not the storage exists' do + ModelSpec::Resource.should_receive(:repository).with(:default) do + repository = mock('repository') + repository.should_receive(:storage_exists?).with('model_spec_resources').and_return(true) + repository + end + ModelSpec::Resource.storage_exists?.should == true + end + end + + it 'should provide #default_order' do + ModelSpec::Resource.should respond_to(:default_order) + end + + describe '#default_order' do + it 'should be equal to #key by default' do + ModelSpec::Resource.default_order.should == [ DataMapper::Query::Direction.new(ModelSpec::Resource.properties[:id], :asc) ] + end + end + + describe '#append_inclusions' do + before(:all) do + @standard_resource_inclusions = DataMapper::Resource.instance_variable_get('@extra_inclusions') + @standard_model_extensions = DataMapper::Model.instance_variable_get('@extra_extensions') + end + + before(:each) do + DataMapper::Resource.instance_variable_set('@extra_inclusions', []) + DataMapper::Model.instance_variable_set('@extra_extensions', []) + + @module = Module.new do + def greet + hi_mom! + end + end + + @another_module = Module.new do + def hello + hi_dad! + end + end + + @class = Class.new + + @class_code = %{ + include DataMapper::Resource + property :id, Serial + } + end + + after(:each) do + DataMapper::Resource.instance_variable_set('@extra_inclusions', @standard_resource_inclusions) + DataMapper::Model.instance_variable_set('@extra_extensions', @standard_model_extensions) + end + + it "should append the module to be included in resources" do + DataMapper::Resource.append_inclusions @module + @class.class_eval(@class_code) + + instance = @class.new + instance.should_receive(:hi_mom!) 
+ instance.greet + end + + it "should append the module to all resources" do + DataMapper::Resource.append_inclusions @module + + objects = (1..5).map do + the_class = Class.new + the_class.class_eval(@class_code) + + instance = the_class.new + instance.should_receive(:hi_mom!) + instance + end + + objects.each { |obj| obj.greet } + end + + it "should append multiple modules to be included in resources" do + DataMapper::Resource.append_inclusions @module, @another_module + @class.class_eval(@class_code) + + instance = @class.new + instance.should_receive(:hi_mom!) + instance.should_receive(:hi_dad!) + instance.greet + instance.hello + end + + it "should include the appended modules in order" do + module_one = Module.new do + def self.included(base); base.hi_mom!; end; + end + + module_two = Module.new do + def self.included(base); base.hi_dad!; end; + end + + DataMapper::Resource.append_inclusions module_two, module_one + + @class.should_receive(:hi_dad!).once.ordered + @class.should_receive(:hi_mom!).once.ordered + + @class.class_eval(@class_code) + end + + it "should append the module to extend resources with" do + DataMapper::Model.append_extensions @module + @class.class_eval(@class_code) + + @class.should_receive(:hi_mom!) + @class.greet + end + + it "should extend all resources with the module" do + DataMapper::Model.append_extensions @module + + classes = (1..5).map do + the_class = Class.new + the_class.class_eval(@class_code) + the_class.should_receive(:hi_mom!) + the_class + end + + classes.each { |cla| cla.greet } + end + + it "should append multiple modules to extend resources with" do + DataMapper::Model.append_extensions @module, @another_module + @class.class_eval(@class_code) + + @class.should_receive(:hi_mom!) + @class.should_receive(:hi_dad!) 
+ @class.greet + @class.hello + end + + it "should extend the resource in the order that the modules were appended" do + @module.class_eval do + def self.extended(base); base.hi_mom!; end; + end + + @another_module.class_eval do + def self.extended(base); base.hi_dad!; end; + end + + DataMapper::Model.append_extensions @another_module, @module + + @class.should_receive(:hi_dad!).once.ordered + @class.should_receive(:hi_mom!).once.ordered + + @class.class_eval(@class_code) + end + + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/naming_conventions_spec.rb b/vendor/dm-core-0.9.6/spec/unit/naming_conventions_spec.rb new file mode 100644 index 0000000..343056b --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/naming_conventions_spec.rb @@ -0,0 +1,36 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe "DataMapper::NamingConventions" do + describe "Resource" do + it "should coerce a string into the Underscored convention" do + DataMapper::NamingConventions::Resource::Underscored.call('User').should == 'user' + DataMapper::NamingConventions::Resource::Underscored.call('UserAccountSetting').should == 'user_account_setting' + end + + it "should coerce a string into the UnderscoredAndPluralized convention" do + DataMapper::NamingConventions::Resource::UnderscoredAndPluralized.call('User').should == 'users' + DataMapper::NamingConventions::Resource::UnderscoredAndPluralized.call('UserAccountSetting').should == 'user_account_settings' + end + + it "should coerce a string into the UnderscoredAndPluralized convention joining namespace with underscore" do + DataMapper::NamingConventions::Resource::UnderscoredAndPluralized.call('Model::User').should == 'model_users' + DataMapper::NamingConventions::Resource::UnderscoredAndPluralized.call('Model::UserAccountSetting').should == 'model_user_account_settings' + end + + it "should coerce a string into the UnderscoredAndPluralizedWithoutModule convention" do + 
DataMapper::NamingConventions::Resource::UnderscoredAndPluralizedWithoutModule.call('Model::User').should == 'users' + DataMapper::NamingConventions::Resource::UnderscoredAndPluralizedWithoutModule.call('Model::UserAccountSetting').should == 'user_account_settings' + end + + it "should coerce a string into the Yaml convention" do + DataMapper::NamingConventions::Resource::Yaml.call('UserSetting').should == 'user_settings.yaml' + DataMapper::NamingConventions::Resource::Yaml.call('User').should == 'users.yaml' + end + end + + describe "Field" do + it "should accept a property as input" do + DataMapper::NamingConventions::Field::Underscored.call(Article.blog_id).should == 'blog_id' + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/property_set_spec.rb b/vendor/dm-core-0.9.6/spec/unit/property_set_spec.rb new file mode 100644 index 0000000..5e04a72 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/property_set_spec.rb @@ -0,0 +1,90 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +class Icon + include DataMapper::Resource + + property :id, Serial + property :name, String + property :width, Integer, :lazy => true + property :height, Integer, :lazy => true +end + +class Boat + include DataMapper::Resource + property :name, String #not lazy + property :text, DataMapper::Types::Text #Lazy by default + property :notes, String, :lazy => true + property :a1, String, :lazy => [:ctx_a,:ctx_c] + property :a2, String, :lazy => [:ctx_a,:ctx_b] + property :a3, String, :lazy => [:ctx_a] + property :b1, String, :lazy => [:ctx_b] + property :b2, String, :lazy => [:ctx_b] + property :b3, String, :lazy => [:ctx_b] +end + +describe DataMapper::PropertySet do + before :each do + @properties = Icon.properties(:default).dup + end + + it "#slice should find properties" do + @properties.slice(:name, 'width').should have(2).entries + end + + it "#select should find properties" do + @properties.select { |property| property.primitive == Integer 
}.should have(3).entries + end + + it "#clear should clear out set" do + @properties.clear + @properties.key.should == [] + @properties.defaults.should == [] + @properties.length.should == 0 + end + + it "#[] should find properties by name (Symbol or String)" do + default_properties = [ :id, 'name', :width, 'height' ] + @properties.each_with_index do |property,i| + property.should == @properties[default_properties[i]] + end + end + + it "should provide defaults" do + @properties.defaults.should have(2).entries + @properties.should have(4).entries + end + + it 'should add a property for lazy loading to the :default context if a context is not supplied' do + Boat.properties(:default).lazy_context(:default).length.should == 2 # text & notes + end + + it 'should return a list of contexts that a given field is in' do + props = Boat.properties(:default) + set = props.property_contexts(:a1) + set.include?(:ctx_a).should == true + set.include?(:ctx_c).should == true + set.include?(:ctx_b).should == false + end + + it 'should return a list of expanded fields that should be loaded with a given field' do + props = Boat.properties(:default) + set = props.lazy_load_context(:a2) + expect = [:a1,:a2,:a3,:b1,:b2,:b3] + expect.should == set.sort! 
{|a,b| a.to_s <=> b.to_s} + end + + describe 'when dup\'ed' do + it 'should duplicate the @entries ivar' do + @properties.dup.entries.should_not equal(@properties.entries) + end + + it 'should reinitialize @properties_for' do + # force @properties_for to hold a property + Icon.properties(:default)[:name].should_not be_nil + @properties = Icon.properties(:default) + + @properties.instance_variable_get("@property_for").should_not be_empty + @properties.dup.instance_variable_get("@property_for").should be_empty + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/property_spec.rb b/vendor/dm-core-0.9.6/spec/unit/property_spec.rb new file mode 100644 index 0000000..2ef489a --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/property_spec.rb @@ -0,0 +1,753 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Property do + before :each do + Object.send(:remove_const, :Zoo) if defined?(Zoo) + class Zoo + include DataMapper::Resource + + property :id, DataMapper::Types::Serial + end + + Object.send(:remove_const, :Name) if defined?(Name) + class Name < DataMapper::Type + primitive String + track :hash + + def self.load(value, property) + value.split(", ").reverse + end + + def self.dump(value, property) + value && value.reverse.join(", ") + end + + def self.typecast(value, property) + value + end + end + + Object.send(:remove_const, :Tomato) if defined?(Tomato) + class Tomato + include DataMapper::Resource + end + end + + describe '.new' do + [ Float, BigDecimal ].each do |primitive| + describe "with a #{primitive} primitive" do + it 'should raise an ArgumentError if precision is 0' do + lambda { + Zoo.class_eval <<-RUBY + property :test, #{primitive}, :precision => 0 + RUBY + }.should raise_error(ArgumentError) + end + + it "raises an ArgumentError if precision is less than 0" do + lambda { + Zoo.class_eval <<-RUBY + property :test, #{primitive}, :precision => -1 + RUBY + }.should raise_error(ArgumentError) + 
end + + it 'should raise an ArgumentError if scale is less than 0' do + lambda { + Zoo.class_eval <<-RUBY + property :test, #{primitive}, :scale => -1 + RUBY + }.should raise_error(ArgumentError) + end + + it 'should raise an ArgumentError if precision is less than scale' do + lambda { + Zoo.class_eval <<-RUBY + property :test, #{primitive}, :precision => 1, :scale => 2 + RUBY + }.should raise_error(ArgumentError) + end + end + end + end + + describe '#field' do + before(:each) do + Zoo.class_eval do + property :location, String, :field => "City" + + repository(:mock) do + property :location, String, :field => "MockCity" + end + end + end + + it 'should accept a custom field' do + Zoo.properties[:location].field.should == 'City' + end + + # How is this supposed to work? + it 'should use repository name if passed in' do + pending + Zoo.properties[:location].field(:mock).should == 'MockCity' + end + end + + describe '#get' do + before do + Zoo.class_eval do + property :name, String, :default => "San Diego" + property :address, String + end + @resource = Zoo.new + end + + describe 'when setting the default on initial access' do + it 'should set the ivar to the default' do + @resource.name.should == 'San Diego' + end + + it 'should set the original value to nil' do + @resource.original_values[:name].should == nil + end + end + + it "should not reload the default if you set the property to nil" do + @resource.name = nil + @resource.name.should == nil + end + end + + describe '#get, when tracking via :hash' do + before do + Zoo.class_eval do + property :name, String, :lazy => true, :track => :hash + end + Zoo.auto_migrate! 
+ @resource = Zoo.create(:name => "San Diego") + end + + describe 'when setting the default on initial access' do + it 'should set the ivar to the default' do + @resource.name.should == "San Diego" + end + + it 'should set the original value to nil' do + @resource.name + @resource.original_values[:name].should == "San Diego".hash + end + + it "should know it's dirty if a change was made to the object" do + @resource.name.upcase! + @resource.should be_dirty + end + end + end + + describe '#get, when tracking via :get' do + before do + Zoo.class_eval do + property :name, String + end + Zoo.auto_migrate! + @resource = Zoo.create(:name => "San Diego") + end + + describe 'when setting the default on initial access' do + it 'should set the ivar to the default' do + @resource.name.should == "San Diego" + end + + it 'should set the original value to "San Diego"' do + @resource.name + @resource.original_values[:name].should == "San Diego" + end + end + + it "should know it's dirty if a change was made to the object" do + @resource.name.upcase! + @resource.name + @resource.should be_dirty + @resource.original_values[:name].should == "San Diego" + end + end + + describe 'with Proc defaults' do + it "calls the proc" do + Zoo.class_eval do + property :name, String, :default => proc {|r,p| "San Diego"} + property :address, String + end + + Zoo.new.name.should == "San Diego" + end + + it "provides the resource to the proc" do + Zoo.class_eval do + property :name, String, :default => proc {|r,p| r.address} + property :address, String + end + + zoo = Zoo.new + zoo.address = "San Diego" + zoo.name.should == "San Diego" + end + + it "provides the property to the proc" do + Zoo.class_eval do + property :name, String, :default => proc {|r,p| p.name.to_s} + end + + zoo = Zoo.new + zoo.name.should == "name" + end + end + + + describe '#get!' 
do + it 'should get the resource' do + Zoo.class_eval do + property :name, String + end + + resource = Zoo.new(:name => "Portland Zoo") + resource.name.should == "Portland Zoo" + end + end + + describe '#set' do + before(:each) do + Zoo.class_eval do + property :name, String + property :age, Integer + property :description, String, :lazy => true + end + Zoo.auto_migrate! + Zoo.create(:name => "San Diego Zoo", :age => 888, + :description => "Great Zoo") + @resource = Zoo.new + end + + it 'should typecast the value' do + @resource.age = "888" + @resource.age.should == 888 + end + + it "should lazy load itself first" do + resource = Zoo.first + resource.description = "Still a Great Zoo" + resource.original_values[:description].should == "Great Zoo" + end + + it "should only set original_values once" do + resource = Zoo.first + resource.description = "Still a Great Zoo" + resource.description = "What can I say. This is one great Zoo" + resource.original_values[:description].should == "Great Zoo" + end + end + + describe '#set!' do + before do + Zoo.class_eval do + property :name, String + property :age, Integer + end + end + + it 'should set the resource' do + resource = Zoo.new + resource.name = "Seattle Zoo" + resource.name.should == "Seattle Zoo" + end + end + + # What is this for? 
+ # --- + # it "should evaluate two similar properties as equal" do + # p1 = DataMapper::Property.new(Zoo, :name, String, { :size => 30 }) + # p2 = DataMapper::Property.new(Zoo, :name, String, { :size => 30 }) + # p3 = DataMapper::Property.new(Zoo, :title, String, { :size => 30 }) + # p1.eql?(p2).should == true + # p1.hash.should == p2.hash + # p1.eql?(p3).should == false + # p1.hash.should_not == p3.hash + # end + + it "should create a String property" do + Zoo.class_eval do + property :name, String, :size => 30 + end + + resource = Zoo.new + resource.name = 100 + resource.name.should == "100" + end + + it "should not have key that is lazy" do + Zoo.class_eval do + property :id, DataMapper::Types::Text, :key => true + property :name, String, :lazy => true + end + Zoo.auto_migrate! + + Zoo.create(:id => "100", :name => "San Diego Zoo") + zoo = Zoo.first + + # Do we mean for attribute_loaded? to be public? + zoo.attribute_loaded?(:id).should == true + end + + it "should lazily load other non-loaded, non-lazy fields" do + # This somewhat contorted setup is to successfully test that + # the list of eager properties to be loaded when it's initially + # missing is, in fact, repository-scoped + Zoo.class_eval do + property :id, DataMapper::Types::Serial + property :name, String, :lazy => true + property :address, String, :lazy => true + + repository(:default2) do + property :name, String + property :address, String + end + end + + repository(:default2) do + Zoo.auto_migrate! + Zoo.create(:name => "San Diego Zoo", :address => "San Diego") + end + repository(:default2) do + zoo = Zoo.first(:fields => [:id]) + + zoo.attribute_loaded?(:name).should == false + zoo.attribute_loaded?(:address).should == false + zoo.name + zoo.attribute_loaded?(:name).should == true + zoo.attribute_loaded?(:address).should == true + end + end + + it "should use a custom type Name property" do + Zoo.class_eval do + property :name, Name + end + + Zoo.auto_migrate! 
+ + zoo = Zoo.create(:name => %w(Zoo San\ Diego)) + Zoo.first.name.should == %w(Zoo San\ Diego) + end + + it "should override type options with property options" do + Zoo.class_eval do + property :name, Name, :track => :get + end + + Zoo.auto_migrate! + + Zoo.create(:name => %w(Awesome Person\ Dude)) + zoo = Zoo.first + zoo.name = %w(Awesome Person\ Dude) + + # If we were tracking by hash, this would cause zoo to be dirty, + # as its hash would not match the original. Since we've overridden + # and are tracking by :get, it won't be dirty + zoo.name.stub!(:hash).and_return(1) + zoo.should_not be_dirty + end + + describe "public details" do + before do + Zoo.class_eval do + property :botanical_name, String, :nullable => true, :lazy => true + property :colloquial_name, DataMapper::Types::Text, :default => "Tomato" + end + Zoo.auto_migrate! + end + + it "should determine nullness" do + Zoo.properties[:botanical_name].options[:nullable].should be_true + end + + it "should determine its name" do + Zoo.properties[:botanical_name].name.should == :botanical_name + end + + # lazy? 
is not exposed to or used by the adapters, so it should be tested indirectly + it "should determine laziness" do + Zoo.create(:botanical_name => "Calystegia sepium") + Zoo.first.attribute_loaded?(:botanical_name).should be_false + end + + it "should automatically set laziness to true on text fields" do + Zoo.create(:colloquial_name => "American hedge bindweed") + Zoo.first.attribute_loaded?(:colloquial_name).should be_false + end + + it "should determine whether it is a key" do + zoo = Zoo.create(:botanical_name => "Calystegia sepium") + id = zoo.id + Zoo.first.id.should == id + end + + it "should determine whether it is serial" do + zoo = Zoo.create(:botanical_name => "Calystegia sepium") + zoo.id.should_not be_nil + end + + it "should determine a default value" do + zoo = Zoo.new + zoo.colloquial_name.should == "Tomato" + end + end + + describe "reader and writer visibility" do + # parameter passed to Property.new # reader | writer visibility + { + {} => [:public, :public], + { :accessor => :public } => [:public, :public], + { :accessor => :protected } => [:protected, :protected], + { :accessor => :private } => [:private, :private], + { :reader => :public } => [:public, :public], + { :reader => :protected } => [:protected, :public], + { :reader => :private } => [:private, :public], + { :writer => :public } => [:public, :public], + { :writer => :protected } => [:public, :protected], + { :writer => :private } => [:public, :private], + { :reader => :public, :writer => :public } => [:public, :public], + { :reader => :public, :writer => :protected } => [:public, :protected], + { :reader => :public, :writer => :private } => [:public, :private], + { :reader => :protected, :writer => :public } => [:protected, :public], + { :reader => :protected, :writer => :protected } => [:protected, :protected], + { :reader => :protected, :writer => :private } => [:protected, :private], + { :reader => :private, :writer => :public } => [:private, :public], + { :reader => :private, 
:writer => :protected } => [:private, :protected], + { :reader => :private, :writer => :private } => [:private, :private], + }.each do |input, output| + it "#{input.inspect} should make reader #{output[0]} and writer #{output[1]}" do + Tomato.class_eval <<-RUBY + property :botanical_name, String, #{input.inspect} + RUBY + Tomato.send("#{output[0]}_instance_methods").should include("botanical_name") + Tomato.send("#{output[1]}_instance_methods").should include("botanical_name=") + end + end + + [ + { :accessor => :junk }, + { :reader => :junk }, + { :writer => :junk }, + { :reader => :public, :writer => :junk }, + { :reader => :protected, :writer => :junk }, + { :reader => :private, :writer => :junk }, + { :reader => :junk, :writer => :public }, + { :reader => :junk, :writer => :protected }, + { :reader => :junk, :writer => :private }, + { :reader => :junk, :writer => :junk }, + { :reader => :junk, :writer => :junk }, + { :reader => :junk, :writer => :junk }, + ].each do |input| + it "#{input.inspect} should raise ArgumentError" do + lambda { + Tomato.class_eval <<-RUBY + property :family, String, #{input.inspect} + RUBY + }.should raise_error(ArgumentError) + end + end + end + + # This is handled by get! + # --- + # it "should return an instance variable name" do + # DataMapper::Property.new(Tomato, :flavor, String, {}).instance_variable_name.should == '@flavor' + # DataMapper::Property.new(Tomato, :ripe, TrueClass, {}).instance_variable_name.should == '@ripe' #not @ripe? + # end + + it "should append ? 
to TrueClass property reader methods" do + class Potato + include DataMapper::Resource + property :id, Integer, :key => true + property :fresh, TrueClass + property :public, TrueClass + end + + Potato.new(:fresh => true).should be_fresh + end + + it "should move unknown options into Property#extra_options" do + Tomato.class_eval do + property :botanical_name, String, :foo => :bar + end + Tomato.properties[:botanical_name].extra_options.should == {:foo => :bar} + end + + it 'should provide #custom?' do + Zoo.class_eval do + property :name, Name, :size => 50 + property :state, String, :size => 2 + end + Zoo.properties[:name].should be_custom + Zoo.properties[:state].should_not be_custom + end + + it "should set the field to the correct field_naming_convention" do + Zoo.class_eval { property :species, String } + Tomato.class_eval { property :genetic_history, DataMapper::Types::Text } + + Zoo.properties[:species].field.should == "species" + Tomato.properties[:genetic_history].field.should == "genetic_history" + end + + it "should provide the primitive mapping" do + Zoo.class_eval do + property :poverty, String + property :fortune, DataMapper::Types::Text + end + + Zoo.properties[:poverty].primitive.should == String + Zoo.properties[:fortune].primitive.should == String + end + + it "should make it possible to define an integer size" do + Zoo.class_eval { property :cleanliness, String, :size => 100 } + Zoo.properties[:cleanliness].size.should == 100 + end + + it "should make it possible to define an integer length (which defines size)" do + Zoo.class_eval { property :cleanliness, String, :length => 100 } + Zoo.properties[:cleanliness].size.should == 100 + end + + it "should make it possible to define a range size" do + Zoo.class_eval { property :cleanliness, String, :size => 0..100 } + Zoo.properties[:cleanliness].size.should == 100 + end + + it "should make it possible to define a range length (which defines size)" do + Zoo.class_eval { property :cleanliness, String, 
:length => 0..100 } + Zoo.properties[:cleanliness].size.should == 100 + end + + describe '#typecast' do + def self.format(value) + case value + when BigDecimal then "BigDecimal(#{value.to_s('F').inspect})" + when Float, Integer, String then "#{value.class}(#{value.inspect})" + else value.inspect + end + end + + it 'should pass through the value if it is the same type when typecasting' do + Zoo.class_eval do + property :name, String + end + zoo = Zoo.new + value = "San Diego" + def value.to_s() "San Francisco" end + zoo.name = value + zoo.name.should == "San Diego" + end + + it 'should pass through the value nil when typecasting' do + Zoo.class_eval do + property :name, String + end + + zoo = Zoo.new + zoo.name = nil + zoo.name.should == nil + end + + it 'should pass through the value for an Object property' do + value = Object.new + Zoo.class_eval do + property :object, Object + end + + zoo = Zoo.new + zoo.object = value + zoo.object.object_id.should == value.object_id + end + + [ true, 'true', 'TRUE', 1, '1', 't', 'T' ].each do |value| + it "should typecast #{value.inspect} to true for a TrueClass property" do + Zoo.class_eval do + property :boolean, TrueClass + end + + zoo = Zoo.new + zoo.boolean = value + zoo.boolean.should == true + end + end + + [ false, 'false', 'FALSE', 0, '0', 'f', 'F' ].each do |value| + it "should typecast #{value.inspect} to false for a Boolean property" do + Zoo.class_eval do + property :boolean, TrueClass + end + + zoo = Zoo.new + zoo.boolean = value + zoo.boolean.should == false + end + end + + it 'should typecast nil to nil for a Boolean property' do + Zoo.class_eval do + property :boolean, TrueClass + end + + zoo = Zoo.new + zoo.boolean = nil + zoo.boolean.should == nil + end + + it 'should typecast "0" to "0" for a String property' do + Zoo.class_eval do + property :string, String + end + + zoo = Zoo.new + zoo.string = "0" + zoo.string.should == "0" + end + + { '0' => 0.0, '0.0' => 0.0, 0 => 0.0, 0.0 => 0.0, BigDecimal('0.0') => 
0.0 }.each do |value,expected| + it "should typecast #{format(value)} to #{format(expected)} for a Float property" do + Zoo.class_eval do + property :float, Float + end + + zoo = Zoo.new + zoo.float = value + zoo.float.should == expected + end + end + + { '-8' => -8, '-8.0' => -8, -8 => -8, -8.0 => -8, BigDecimal('8.0') => 8, + '0' => 0, '0.0' => 0, 0 => 0, 0.0 => 0, BigDecimal('0.0') => 0, + '5' => 5, '5.0' => 5, 5 => 5, 5.0 => 5, BigDecimal('5.0') => 5, + 'none' => nil, 'almost 5' => nil, '-3 change' => -3, '9 items' => 9}.each do |value,expected| + it "should typecast #{format(value)} to #{format(expected)} for an Integer property" do + Zoo.class_eval do + property :int, Integer + end + + zoo = Zoo.new + zoo.int = value + zoo.int.should == expected + end + end + + { '0' => BigDecimal('0'), '0.0' => BigDecimal('0.0'), 0.0 => BigDecimal('0.0'), BigDecimal('0.0') => BigDecimal('0.0') }.each do |value,expected| + it "should typecast #{format(value)} to #{format(expected)} for a BigDecimal property" do + Zoo.class_eval do + property :big_decimal, BigDecimal + end + + zoo = Zoo.new + zoo.big_decimal = value + zoo.big_decimal.should == expected + end + end + + it 'should typecast value for a DateTime property' do + Zoo.class_eval { property :date_time, DateTime } + zoo = Zoo.new + zoo.date_time = '2000-01-01 00:00:00' + zoo.date_time.should == DateTime.new(2000, 1, 1, 0, 0, 0) + end + + it 'should typecast value for a Date property' do + Zoo.class_eval { property :date, Date } + zoo = Zoo.new + zoo.date = '2000-01-01' + zoo.date.should == Date.new(2000, 1, 1) + end + + it 'should typecast value for a Time property' do + Zoo.class_eval { property :time, Time } + zoo = Zoo.new + zoo.time = '2000-01-01 01:01:01.123456' + zoo.time.should == Time.local(2000, 1, 1, 1, 1, 1, 123456) + end + + it 'should typecast Hash for a Time property' do + Zoo.class_eval { property :time, Time } + zoo = Zoo.new + zoo.time = {:year => 2002, "month" => 1, :day => 1, "hour" => 12, :min => 0, 
:sec => 0} + zoo.time.should == Time.local(2002, 1, 1, 12, 0, 0) + end + + it 'should typecast Hash for a Date property' do + Zoo.class_eval { property :date, Date } + zoo = Zoo.new + zoo.date = {:year => 2002, "month" => 1, :day => 1} + zoo.date.should == Date.new(2002, 1, 1) + end + + it 'should typecast Hash for a DateTime property' do + Zoo.class_eval { property :date_time, DateTime } + zoo = Zoo.new + zoo.date_time = {:year => 2002, :month => 1, :day => 1, "hour" => 12, :min => 0, "sec" => 0} + zoo.date_time.should == DateTime.new(2002, 1, 1, 12, 0, 0) + end + + it 'should use now as defaults for missing parts of a Hash to Time typecast' do + now = Time.now + Zoo.class_eval { property :time, Time } + zoo = Zoo.new + zoo.time = {:month => 1, :day => 1} + zoo.time.should == Time.local(now.year, 1, 1, now.hour, now.min, now.sec) + end + + it 'should use now as defaults for missing parts of a Hash to Date typecast' do + now = Time.now + Zoo.class_eval { property :date, Date } + zoo = Zoo.new + zoo.date = {:month => 1, :day => 1} + zoo.date.should == Date.new(now.year, 1, 1) + end + + it 'should use now as defaults for missing parts of a Hash to DateTime typecast' do + now = Time.now + Zoo.class_eval { property :date_time, DateTime } + zoo = Zoo.new + zoo.date_time = {:month => 1, :day => 1} + zoo.date_time.should == DateTime.new(now.year, 1, 1, now.hour, now.min, now.sec) + end + + it 'should rescue after trying to typecast an invalid Date value from a hash' do + now = Time.now + Zoo.class_eval { property :date, Date } + zoo = Zoo.new + zoo.date = {:year => 2002, :month => 2, :day => 31} + zoo.date.should == Date.new(2002, 3, 3) + end + + it 'should rescue after trying to typecast an invalid DateTime value from a hash' do + now = Time.now + Zoo.class_eval { property :date_time, DateTime } + zoo = Zoo.new + zoo.date_time = { + :year => 2002, :month => 2, :day => 31, :hour => 12, :min => 0, :sec => 0 + } + zoo.date_time.should == DateTime.new(2002, 3, 3, 12, 0, 0) + 
end + + it 'should typecast value for a Class property' do + Zoo.class_eval { property :klass, Class } + zoo = Zoo.new + zoo.klass = "Zoo" + zoo.klass.should == Zoo + end + end + + it 'should return an abbreviated representation of the property when inspected' do + Zoo.class_eval { property :name, String } + Zoo.properties[:name].inspect.should == '#' + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/query_spec.rb b/vendor/dm-core-0.9.6/spec/unit/query_spec.rb new file mode 100644 index 0000000..40d19cf --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/query_spec.rb @@ -0,0 +1,571 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +GOOD_OPTIONS = [ + [ :reload, false ], + [ :reload, true ], + [ :offset, 0 ], + [ :offset, 1 ], + [ :limit, 1 ], + [ :limit, 2 ], + [ :order, [ DataMapper::Query::Direction.new(Article.properties[:created_at], :desc) ] ], + [ :fields, Article.properties.defaults.to_a ], # TODO: fill in allowed default value + #[ :links, [ :stub ] ], # TODO: fill in allowed default value + [ :includes, [ :stub ] ], # TODO: fill in allowed default value +] + +BAD_OPTIONS = { + :reload => 'true', + :offset => -1, + :limit => 0, +# :order => [], # TODO: spec conditions where :order may be empty +# :fields => [], # TODO: spec conditions where :fields may be empty + :links => [], + :includes => [], + :conditions => [], +} + +# flatten GOOD_OPTIONS into a Hash to remove default values, since +# default value, when defined, is always listed first in GOOD_OPTIONS +UPDATED_OPTIONS = GOOD_OPTIONS.inject({}) do |options,(attribute,value)| + options.update attribute => value +end + +UPDATED_OPTIONS.merge!({ :fields => [ :id, :author ]}) + +describe DataMapper::Query do + before do + @adapter = mock('adapter') + @repository = mock('repository', :kind_of? 
=> true, :name => 'mock', :adapter => @adapter) + + @query = DataMapper::Query.new(@repository, Article) + end + + it 'should provide .new' do + DataMapper::Query.should respond_to(:new) + end + + describe '.new' do + describe 'should set the attribute' do + it '#model with model' do + query = DataMapper::Query.new(@repository, Article) + query.model.should == Article + end + + GOOD_OPTIONS.each do |(attribute,value)| + it "##{attribute} with options[:#{attribute}] if it is #{value.inspect}" do + query = DataMapper::Query.new(@repository, Article, attribute => value) + query.send(attribute == :reload ? :reload? : attribute).should == value + end + end + + describe ' #conditions with options[:conditions]' do + it 'when they are a Hash' do + query = DataMapper::Query.new(@repository, Article, :conditions => { :author => 'dkubb' }) + query.conditions.should == [ [ :eql, Article.properties[:author], 'dkubb' ] ] + end + + it 'when they have a one element Array' do + query = DataMapper::Query.new(@repository, Article, :conditions => [ 'name = "dkubb"' ]) + query.conditions.should == [ [ :raw, 'name = "dkubb"' ] ] + query.bind_values.should == [] + end + + it 'when they have a two or more element Array' do + bind_values = %w[ dkubb ] + query = DataMapper::Query.new(@repository, Article, :conditions => [ 'name = ?', *bind_values ]) + query.conditions.should == [ [ :raw, 'name = ?', bind_values ] ] + query.bind_values.should == bind_values + + bind_values = [ 'dkubb', 32 ] + query = DataMapper::Query.new(@repository, Article, :conditions => [ 'name = ? OR age = ?', *bind_values ]) + query.conditions.should == [ [ :raw, 'name = ? 
OR age = ?', bind_values ] ] + query.bind_values.should == bind_values + + bind_values = [ %w[ dkubb ssmoot ] ] + query = DataMapper::Query.new(@repository, Article, :conditions => [ 'name IN ?', *bind_values ]) + query.conditions.should == [ [ :raw, 'name IN ?', bind_values ] ] + query.bind_values.should == bind_values + end + + it 'when they have another DM:Query as the value of sub-select' do + class Acl + include DataMapper::Resource + property :id, Integer + property :resource_id, Integer + end + + acl_query = DataMapper::Query.new(@repository, Acl, :fields=>[:resource_id]) #this would normally have conditions + query = DataMapper::Query.new(@repository, Article, :id.in => acl_query) + query.conditions.each do |operator, property, value| + operator.should == :in + property.name.should == :id + value.should == acl_query + end + end + end + + describe ' #conditions with unknown options' do + it 'when a Symbol object is a key' do + query = DataMapper::Query.new(@repository, Article, :author => 'dkubb') + query.conditions.should == [ [ :eql, Article.properties[:author], 'dkubb' ] ] + end + + it 'when a Query::Operator object is a key' do + query = DataMapper::Query.new(@repository, Article, :author.like => /\Ad(?:an\.)kubb\z/) + query.conditions.should == [ [ :like, Article.properties[:author], /\Ad(?:an\.)kubb\z/ ] ] + end + end + + it '#order with model.default_order if none provided' do + query = DataMapper::Query.new(@repository, Article) + query.order.should == [ DataMapper::Query::Direction.new(Article.properties[:id], :asc) ] + end + end + + describe 'should raise an ArgumentError' do + it 'when repository is nil' do + lambda { + DataMapper::Query.new(nil, NormalClass) + }.should raise_error(ArgumentError) + end + + it 'when model is nil' do + lambda { + DataMapper::Query.new(@repository, nil) + }.should raise_error(ArgumentError) + end + + it 'when model is a Class that does not include DataMapper::Resource' do + lambda { + 
DataMapper::Query.new(@repository, NormalClass) + }.should raise_error(ArgumentError) + end + + it 'when options is not a Hash' do + lambda { + DataMapper::Query.new(@repository, Article, nil) + }.should raise_error(ArgumentError) + end + + BAD_OPTIONS.each do |attribute,value| + it "when options[:#{attribute}] is nil" do + lambda { + DataMapper::Query.new(@repository, Article, attribute => nil) + }.should raise_error(ArgumentError) + end + + it "when options[:#{attribute}] is #{value.kind_of?(Array) && value.empty? ? 'an empty Array' : value.inspect}" do + lambda { + DataMapper::Query.new(@repository, Article, attribute => value) + }.should raise_error(ArgumentError) + end + end + + it 'when unknown options use something that is not a Query::Operator, Symbol or String is a key' do + lambda { + DataMapper::Query.new(@repository, Article, nil => nil) + }.should raise_error(ArgumentError) + end + end + + describe 'should normalize' do + it '#fields' do + DataMapper::Query.new(@repository, Article, :fields => [:id]).fields.should == Article.properties.slice(:id) + end + end + + describe 'should translate custom types' do + before(:each) do + class Acl + include DataMapper::Resource + property :id, Integer + property :is_custom_type, DM::Boolean + end + end + it "should call Boolean#dump for :is_custom_type options" do + DM::Boolean.should_receive(:dump).with(:false, Acl.properties[:is_custom_type]) + DataMapper::Query.new(@repository, Acl, :is_custom_type => :false) + end + end + end + + it 'should provide #update' do + @query.should respond_to(:update) + end + + describe '#update' do + before do + @query = DataMapper::Query.new(@repository, Article, UPDATED_OPTIONS) + end + + it 'should instantiate a DataMapper::Query object from other when it is a Hash' do + other = { :reload => :true } + + @query.should_receive(:class).with(no_args).exactly(3).times.ordered.and_return(DataMapper::Query) + DataMapper::Query.should_receive(:new).with(@repository, @query.model, 
other).ordered.and_return(@query) + + @query.update(other) + end + + it 'should raise an ArgumentError if other query model is different' do + lambda { + other = DataMapper::Query.new(@repository, Comment) + @query.update(other) + }.should raise_error(ArgumentError) + end + + it 'should return self' do + other = DataMapper::Query.new(@repository, Article) + @query.update(other).should == @query + end + + describe 'should overwrite the attribute' do + it '#reload? with other reload?' do + other = DataMapper::Query.new(@repository, Article, :reload => true) + @query.update(other).reload?.should == true + end + + it '#offset with other offset when it is not equal to 0' do + other = DataMapper::Query.new(@repository, Article, :offset => 1) + @query.update(other).offset.should == 1 + end + + it '#limit with other limit when it is not nil' do + other = DataMapper::Query.new(@repository, Article, :limit => 1) + @query.update(other).limit.should == 1 + end + + it '#the operator if condition is the same and operater is changed (:not / :eql)' do + # especially needed for collection#update where you might do something like: + # all(:name.not => "John").update(:name => "John") + pending do + other = DataMapper::Query.new(@repository, Article, :author.not => "dkubb") + @query.update(other).conditions.should == [ [ :not, Article.properties[:author], 'dkubb' ] ] + @query.update(:author => "dkubb").conditions.should == [ [ :eql, Article.properties[:author], 'dkubb' ] ] + end + end + + [ :eql, :like ].each do |operator| + it "#conditions with other conditions when updating the '#{operator}' clause to a different value than in self" do + # set the initial conditions + @query.update(:author.send(operator) => 'ssmoot') + + # update the conditions, and overwrite with the new value + other = DataMapper::Query.new(@repository, Article, :author.send(operator) => 'dkubb') + @query.update(other).conditions.should == [ [ operator, Article.properties[:author], 'dkubb' ] ] + end + end + + [ 
:gt, :gte ].each do |operator| + it "#conditions with other conditions when updating the '#{operator}' clause to a value less than in self" do + # set the initial conditions + @query.update(:created_at.send(operator) => Time.at(1)) + + # update the conditions, and overwrite with the new value is less + other = DataMapper::Query.new(@repository, Article, :created_at.send(operator) => Time.at(0)) + @query.update(other).conditions.should == [ [ operator, Article.properties[:created_at], Time.at(0) ] ] + end + end + + [ :lt, :lte ].each do |operator| + it "#conditions with other conditions when updating the '#{operator}' clause to a value greater than in self" do + # set the initial conditions + @query.update(:created_at.send(operator) => Time.at(0)) + + # update the conditions, and overwrite with the new value is more + other = DataMapper::Query.new(@repository, Article, :created_at.send(operator) => Time.at(1)) + @query.update(other).conditions.should == [ [ operator, Article.properties[:created_at], Time.at(1) ] ] + end + end + + it "#order with other order unique values" do + order = [ + DataMapper::Query::Direction.new(Article.properties[:created_at], :desc), + DataMapper::Query::Direction.new(Article.properties[:author], :desc), + DataMapper::Query::Direction.new(Article.properties[:title], :desc), + ] + + other = DataMapper::Query.new(@repository, Article, :order => order) + @query.update(other).order.should == order + end + + # dkubb: I am not sure i understand the intent here. 
link now needs to be + # a DM::Assoc::Relationship or the name (Symbol or String) of an + # association on the Resource -- thx guyvdb + # + # NOTE: I have commented out :links in the GOOD_OPTIONS above + # + [ :links, :includes ].each do |attribute| + it "##{attribute} with other #{attribute} unique values" do + pending 'DataMapper::Query::Path not ready' + other = DataMapper::Query.new(@repository, Article, attribute => [ :stub, :other, :new ]) + @query.update(other).send(attribute).should == [ :stub, :other, :new ] + end + end + + it "#fields with other fields unique values" do + other = DataMapper::Query.new(@repository, Article, :fields => [ :blog_id ]) + @query.update(other).fields.should == Article.properties.slice(:blog_id) + end + + it '#conditions with other conditions when they are unique' do + # set the initial conditions + @query.update(:title => 'On DataMapper') + + # update the conditions, but merge the conditions together + other = DataMapper::Query.new(@repository, Article, :author => 'dkubb') + @query.update(other).conditions.should == [ [ :eql, Article.properties[:title], 'On DataMapper' ], [ :eql, Article.properties[:author], 'dkubb' ] ] + end + + [ :not, :in ].each do |operator| + it "#conditions with other conditions when updating the '#{operator}' clause" do + # set the initial conditions + @query.update(:created_at.send(operator) => [ Time.at(0) ]) + + # update the conditions, and overwrite with the new value is more + other = DataMapper::Query.new(@repository, Article, :created_at.send(operator) => [ Time.at(1) ]) + @query.update(other).conditions.should == [ [ operator, Article.properties[:created_at], [ Time.at(0), Time.at(1) ] ] ] + end + end + + it '#conditions with other conditions when they have a one element condition' do + # set the initial conditions + @query.update(:title => 'On DataMapper') + + # update the conditions, but merge the conditions together + other = DataMapper::Query.new(@repository, Article, :conditions => [ 'author 
= "dkubb"' ]) + @query.update(other).conditions.should == [ [ :eql, Article.properties[:title], 'On DataMapper' ], [ :raw, 'author = "dkubb"' ] ] + end + + it '#conditions with other conditions when they have a two or more element condition' do + # set the initial conditions + @query.update(:title => 'On DataMapper') + + # update the conditions, but merge the conditions together + other = DataMapper::Query.new(@repository, Article, :conditions => [ 'author = ?', 'dkubb' ]) + @query.update(other).conditions.should == [ [ :eql, Article.properties[:title], 'On DataMapper' ], [ :raw, 'author = ?', [ 'dkubb' ] ] ] + end + end + + describe 'should not update the attribute' do + it '#offset when other offset is equal to 0' do + other = DataMapper::Query.new(@repository, Article, :offset => 0) + other.offset.should == 0 + @query.update(other).offset.should == 1 + end + + it '#limit when other limit is nil' do + other = DataMapper::Query.new(@repository, Article) + other.limit.should be_nil + @query.update(other).offset.should == 1 + end + + [ :gt, :gte ].each do |operator| + it "#conditions with other conditions when they have a '#{operator}' clause with a value greater than in self" do + # set the initial conditions + @query.update(:created_at.send(operator) => Time.at(0)) + + # do not overwrite with the new value if it is more + other = DataMapper::Query.new(@repository, Article, :created_at.send(operator) => Time.at(1)) + @query.update(other).conditions.should == [ [ operator, Article.properties[:created_at], Time.at(0) ] ] + end + end + + [ :lt, :lte ].each do |operator| + it "#conditions with other conditions when they have a '#{operator}' clause with a value less than in self" do + # set the initial conditions + @query.update(:created_at.send(operator) => Time.at(1)) + + # do not overwrite with the new value if it is less + other = DataMapper::Query.new(@repository, Article, :created_at.send(operator) => Time.at(0)) + @query.update(other).conditions.should == [ [ 
operator, Article.properties[:created_at], Time.at(1) ] ] + end + end + end + end + + it 'should provide #merge' do + @query.should respond_to(:merge) + end + + describe '#merge' do + it 'should pass arguments as-is to duplicate object\'s #update method' do + dupe_query = @query.dup + @query.should_receive(:dup).with(no_args).ordered.and_return(dupe_query) + dupe_query.should_receive(:update).with(:author => 'dkubb').ordered + @query.merge(:author => 'dkubb') + end + + it 'should return the duplicate object' do + dupe_query = @query.merge(:author => 'dkubb') + @query.object_id.should_not == dupe_query.object_id + @query.merge(:author => 'dkubb').should == dupe_query + end + end + + it 'should provide #==' do + @query.should respond_to(:==) + end + + describe '#==' do + describe 'should be equal' do + it 'when other is same object' do + @query.update(:author => 'dkubb').should == @query + end + + it 'when other has the same attributes' do + other = DataMapper::Query.new(@repository, Article) + @query.object_id.should_not == other.object_id + @query.should == other + end + + it 'when other has the same conditions sorted differently' do + @query.update(:author => 'dkubb') + @query.update(:title => 'On DataMapper') + + other = DataMapper::Query.new(@repository, Article, :title => 'On DataMapper') + other.update(:author => 'dkubb') + + # query conditions are in different order + @query.conditions.should == [ [ :eql, Article.properties[:author], 'dkubb' ], [ :eql, Article.properties[:title], 'On DataMapper' ] ] + other.conditions.should == [ [ :eql, Article.properties[:title], 'On DataMapper' ], [ :eql, Article.properties[:author], 'dkubb' ] ] + + @query.should == other + end + end + + describe 'should be different' do + it 'when other model is different than self.model' do + @query.should_not == DataMapper::Query.new(@repository, Comment) + end + + UPDATED_OPTIONS.each do |attribute,value| + it "when other #{attribute} is different than self.#{attribute}" do + 
@query.should_not == DataMapper::Query.new(@repository, Article, attribute => value) + end + end + + it 'when other conditions are different than self.conditions' do + @query.should_not == DataMapper::Query.new(@repository, Article, :author => 'dkubb') + end + end + end + + it 'should provide #reverse' do + @query.should respond_to(:reverse) + end + + describe '#reverse' do + it 'should create a duplicate query and delegate to #reverse!' do + copy = @query.dup + copy.should_receive(:reverse!).with(no_args).and_return(@query) + @query.should_receive(:dup).with(no_args).and_return(copy) + + @query.reverse.should == @query + end + end + + it 'should provide #reverse!' do + @query.should respond_to(:reverse!) + end + + describe '#reverse!' do + it 'should update the query with the reverse order' do + normal_order = Article.key.map { |p| DataMapper::Query::Direction.new(p, :asc) } + reverse_order = Article.key.map { |p| DataMapper::Query::Direction.new(p, :desc) } + + normal_order.should_not be_empty + reverse_order.should_not be_empty + + @query.order.should == normal_order + @query.should_receive(:update).with(:order => reverse_order) + @query.reverse!.object_id.should == @query.object_id + end + end + + describe 'inheritance properties' do + before(:each) do + class Parent + include DataMapper::Resource + property :id, Serial + property :type, Discriminator + end + @query = DataMapper::Query.new(@repository, Parent) + @other_query = DataMapper::Query.new(@repository, Article) + end + + it 'should provide #inheritance_property' do + @query.should respond_to(:inheritance_property) + end + + describe '#inheritance_property' do + it 'should return a Property object if there is a Discriminator field' do + @query.inheritance_property.should be_kind_of(DataMapper::Property) + @query.inheritance_property.name.should == :type + @query.inheritance_property.type.should == DM::Discriminator + end + + it 'should return nil if there is no Discriminator field' do + 
@other_query.inheritance_property.should be_nil + end + end + + it 'should provide #inheritance_property_index' do + @query.should respond_to(:inheritance_property_index) + end + + describe '#inheritance_property_index' do + it 'should return integer index if there is a Discriminator field' do + @query.inheritance_property_index.should be_kind_of(Integer) + @query.inheritance_property_index.should == 1 + end + + it 'should return nil if there is no Discriminator field' + end + end +end + +describe DataMapper::Query::Operator do + before do + @operator = :thing.gte + end + + it 'should provide #==' do + @operator.should respond_to(:==) + end + + describe '#==' do + describe 'should be equal' do + it 'when other is same object' do + @operator.should == @operator + end + + it 'when other has the same target and operator' do + other = :thing.gte + @operator.target.should == other.target + @operator.operator.should == other.operator + @operator.should == other + end + end + + describe 'should be different' do + it 'when other class is not a descendant of self.class' do + other = :thing + other.class.should_not be_kind_of(@operator.class) + @operator.should_not == other + end + + it 'when other has a different target' do + other = :other.gte + @operator.target.should_not == other.target + @operator.should_not == other + end + + it 'when other has a different operator' do + other = :thing.gt + @operator.operator.should_not == other.operator + @operator.should_not == other + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/repository_spec.rb b/vendor/dm-core-0.9.6/spec/unit/repository_spec.rb new file mode 100644 index 0000000..33cdc9d --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/repository_spec.rb @@ -0,0 +1,93 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Repository do + before do + @adapter = mock('adapter') + @identity_map = mock('identity map', :[]= => nil) + @identity_maps = 
mock('identity maps', :[] => @identity_map) + + @repository = repository(:mock) + @repository.stub!(:adapter).and_return(@adapter) + + # TODO: stub out other external dependencies in repository + end + + describe "managing transactions" do + it "should create a new Transaction with itself as argument when #transaction is called" do + transaction = mock('transaction') + DataMapper::Transaction.should_receive(:new).with(@repository).and_return(transaction) + @repository.transaction.should == transaction + end + end + + it 'should provide .storage_exists?' do + @repository.should respond_to(:storage_exists?) + end + + it '.storage_exists? should whether or not the storage exists' do + @adapter.should_receive(:storage_exists?).with(:vegetable).and_return(true) + + @repository.storage_exists?(:vegetable).should == true + end + + it "should provide persistance methods" do + @repository.should respond_to(:create) + @repository.should respond_to(:read_many) + @repository.should respond_to(:read_one) + @repository.should respond_to(:update) + @repository.should respond_to(:delete) + end + + it "should be reused in inner scope" do + DataMapper.repository(:default) do |outer_repos| + DataMapper.repository(:default) do |inner_repos| + outer_repos.object_id.should == inner_repos.object_id + end + end + end + + it 'should provide default_name' do + DataMapper::Repository.should respond_to(:default_name) + end + + it 'should return :default for default_name' do + DataMapper::Repository.default_name.should == :default + end + + describe "#migrate!" do + it "should call DataMapper::Migrator.migrate with itself as the repository argument" do + DataMapper::Migrator.should_receive(:migrate).with(@repository.name) + + @repository.migrate! + end + end + + describe "#auto_migrate!" 
do + it "should call DataMapper::AutoMigrator.auto_migrate with itself as the repository argument" do + DataMapper::AutoMigrator.should_receive(:auto_migrate).with(@repository.name) + + @repository.auto_migrate! + end + end + + describe "#auto_upgrade!" do + it "should call DataMapper::AutoMigrator.auto_upgrade with itself as the repository argument" do + DataMapper::AutoMigrator.should_receive(:auto_upgrade).with(@repository.name) + + @repository.auto_upgrade! + end + end + + describe "#map" do + it "should call type_map.map with the arguments" do + type_map = mock('type map') + + @adapter.class.should_receive(:type_map).and_return(type_map) + DataMapper::TypeMap.should_receive(:new).with(type_map).and_return(type_map) + + type_map.should_receive(:map).with(:type, :arg) + + @repository.map(:type, :arg) + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/resource_spec.rb b/vendor/dm-core-0.9.6/spec/unit/resource_spec.rb new file mode 100644 index 0000000..8b1cd73 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/resource_spec.rb @@ -0,0 +1,635 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Resource do + + load_models_for_metaphor :zoo + + describe '#attributes' do + it 'should return a hash of attribute-names and values' do + zoo = Zoo.new + zoo.name = "San Francisco" + zoo.description = "This is a pretty awesome zoo" + zoo.attributes.should == { + :name => "San Francisco", :description => "This is a pretty awesome zoo", + :id => nil, :inception => nil, :open => false, :size => nil + } + end + + it "should return a hash with all nil values if the instance is new and has no default values" do + Species.new.attributes.should == { :id => nil, :name => nil } + end + + it 'should not include private attributes' do + Species.new.attributes.should == { :id => nil, :name => nil } + end + end + +# ---------- REPOSITORY WRITE METHODS --------------- + + describe '#save' do + + describe 'with a new 
resource' do + it 'should set defaults before create' + it 'should create when dirty' + it 'should create when non-dirty, and it has a serial key' + end + + describe 'with an existing resource' do + it 'should update' + end + + end +end + +# ---------- Old specs... BOOOOOOOOOO --------------- + +describe DataMapper::Resource do + before(:each) do + Object.send(:remove_const, :Planet) if defined?(Planet) + class Planet + include DataMapper::Resource + + storage_names[:legacy] = "dying_planets" + + property :id, Integer, :key => true + property :name, String + property :age, Integer + property :core, String, :accessor => :private + property :type, Discriminator + property :data, Object, :track => :get + + repository(:legacy) do + property :cowabunga, String + end + + def age + attribute_get(:age) + end + + def to_s + name + end + end + + Object.send(:remove_const, :Phone) if defined?(Phone) + class Phone + include DataMapper::Resource + + property :name, String, :key => true + property :awesomeness, Integer + end + + Object.send(:remove_const, :Fruit) if defined?(Fruit) + class Fruit + include DataMapper::Resource + + property :id, Integer, :key => true + property :name, String + end + + Object.send(:remove_const, :Grain) if defined?(Grain) + class Grain + include DataMapper::Resource + + property :id, Serial + property :name, String, :default => 'wheat' + end + + Object.send(:remove_const, :Vegetable) if defined?(Vegetable) + class Vegetable + include DataMapper::Resource + + property :id, Serial + property :name, String + end + + Object.send(:remove_const, :Banana) if defined?(Banana) + class Banana < Fruit + property :type, Discriminator + end + + Object.send(:remove_const, :Cyclist) if defined?(Cyclist) + class Cyclist + include DataMapper::Resource + property :id, Serial + property :victories, Integer + end + + Fruit.auto_migrate! + Planet.auto_migrate! + Cyclist.auto_migrate! 
+ end + + it 'should provide #save' do + Planet.new.should respond_to(:save) + end + + describe '#save' do + before(:each) do + @adapter = repository(:default).adapter + end + + describe 'with a new resource' do + it 'should set defaults before create' do + resource = Grain.new + + resource.should_not be_dirty + resource.should be_new_record + resource.instance_variable_get('@name').should be_nil + + @adapter.should_receive(:create).with([ resource ]).and_return(1) + + resource.save.should be_true + + resource.instance_variable_get('@name').should == 'wheat' + end + + it 'should create when dirty' do + resource = Vegetable.new(:id => 1, :name => 'Potato') + + resource.should be_dirty + resource.should be_new_record + + @adapter.should_receive(:create).with([ resource ]).and_return(1) + + resource.save.should be_true + end + + it 'should create when non-dirty, and it has a serial key' do + resource = Vegetable.new + + resource.should_not be_dirty + resource.should be_new_record + resource.model.key.any? { |p| p.serial? }.should be_true + + @adapter.should_receive(:create).with([ resource ]).and_return(1) + + resource.save.should be_true + end + + it 'should not create when non-dirty, and is has a non-serial key' do + resource = Fruit.new + + resource.should_not be_dirty + resource.should be_new_record + resource.model.key.any? { |p| p.serial? 
}.should be_false + + resource.save.should be_false + end + + it 'should return true even if the object is not dirty' do + resource = Cyclist.new + resource.victories = "0 victories" + resource.save.should be_true + + resource.should_not be_dirty + resource.should_not be_new_record + resource.save.should be_true + end + + describe 'for integer fields' do + + it "should save strings without digits as nil" do + resource = Cyclist.new + resource.victories = "none" + resource.save.should be_true + resource.victories.should be_nil + end + + it "should save strings beginning with non-digits as nil" do + resource = Cyclist.new + resource.victories = "almost 5" + resource.save.should be_true + resource.victories.should be_nil + end + + it 'should save strings beginning with negative numbers as that number' do + resource = Cyclist.new + resource.victories = "-4 victories" + resource.save.should be_true + resource.victories.should == -4 + end + + it 'should save strings beginning with 0 as 0' do + resource = Cyclist.new + resource.victories = "0 victories" + resource.save.should be_true + resource.victories.should == 0 + end + + it 'should save strings beginning with positive numbers as that number' do + resource = Cyclist.new + resource.victories = "23 victories" + resource.save.should be_true + resource.victories.should == 23 + end + + end + + end + + describe 'with an existing resource' do + it 'should update' do + resource = Vegetable.new(:name => 'Potato') + resource.instance_variable_set('@new_record', false) + + resource.should be_dirty + resource.should_not be_new_record + + @adapter.should_receive(:update).with(resource.dirty_attributes, resource.to_query).and_return(1) + + resource.save.should be_true + end + end + end + + it "should be able to overwrite to_s" do + Planet.new(:name => 'Mercury').to_s.should == 'Mercury' + end + + describe "storage names" do + it "should use its class name by default" do + Planet.storage_name.should == "planets" + end + + it "should 
allow changing using #default_storage_name" do + Planet.class_eval <<-EOF.margin + @storage_names.clear + def self.default_storage_name + "Superplanet" + end + EOF + + Planet.storage_name.should == "superplanets" + Planet.class_eval <<-EOF.margin + @storage_names.clear + def self.default_storage_name + self.name + end + EOF + end + end + + it "should require a key" do + lambda do + DataMapper::Model.new("stuff") do + property :name, String + end.new + end.should raise_error(DataMapper::IncompleteResourceError) + end + + it "should hold repository-specific properties" do + Planet.properties(:legacy).should have_property(:cowabunga) + Planet.properties.should_not have_property(:cowabunga) + end + + it "should track the classes that include it" do + DataMapper::Resource.descendants.clear + klass = Class.new { include DataMapper::Resource } + DataMapper::Resource.descendants.should == Set.new([klass]) + end + + it "should return an instance of the created object" do + Planet.create(:name => 'Venus', :age => 1_000_000, :id => 42).should be_a_kind_of(Planet) + end + + it 'should provide persistance methods' do + planet = Planet.new + planet.should respond_to(:new_record?) 
+ planet.should respond_to(:save) + planet.should respond_to(:destroy) + end + + it "should have attributes" do + attributes = { :name => 'Jupiter', :age => 1_000_000, :id => 42, :type => Planet, :data => nil } + jupiter = Planet.new(attributes) + jupiter.attributes.should == attributes + end + + it "should be able to set attributes" do + attributes = { :name => 'Jupiter', :age => 1_000_000, :id => 42, :type => Planet, :data => nil } + jupiter = Planet.new(attributes) + jupiter.attributes.should == attributes + + new_attributes = attributes.merge( :age => 2_500_000 ) + jupiter.attributes = new_attributes + jupiter.attributes.should == new_attributes + end + + it "should be able to set attributes using update_attributes" do + attributes = { :name => 'Jupiter', :age => 1_000_000, :id => 42, :type => Planet, :data => nil } + jupiter = Planet.new(attributes) + jupiter.attributes.should == attributes + + new_age = { :age => 3_700_000 } + jupiter.update_attributes(new_age).should be_true + jupiter.age.should == 3_700_000 + jupiter.attributes.should == attributes.merge(new_age) + end + + # Illustrates a possible controller situation, where an expected params + # key does not exist. 
+ it "update_attributes(nil) should raise an exception" do + hincapie = Cyclist.new + params = {} + lambda { + hincapie.update_attributes(params[:does_not_exist]) + }.should raise_error(ArgumentError) + end + + it "update_attributes(:not_a_hash) should raise an exception" do + hincapie = Cyclist.new + lambda { + hincapie.update_attributes(:not_a_hash).should be_false + }.should raise_error(ArgumentError) + end + + # :core is a private accessor so Ruby should raise NameError + it "should not be able to set private attributes" do + lambda { + jupiter = Planet.new({ :core => "Molten Metal" }) + }.should raise_error(NameError) + end + + it "should not mark attributes dirty if they are similar after update" do + jupiter = Planet.new(:name => 'Jupiter', :age => 1_000_000, :id => 42, :data => { :a => "Yeah!" }) + jupiter.save.should be_true + + # discriminator will be set automatically + jupiter.type.should == Planet + + jupiter.attributes = { :name => 'Jupiter', :age => 1_000_000, :data => { :a => "Yeah!" } } + + jupiter.attribute_dirty?(:name).should be_false + jupiter.attribute_dirty?(:age).should be_false + jupiter.attribute_dirty?(:core).should be_false + jupiter.attribute_dirty?(:data).should be_false + + jupiter.dirty?.should be_false + end + + it "should not mark attributes dirty if they are similar after typecasting" do + jupiter = Planet.new(:name => 'Jupiter', :age => 1_000_000, :id => 42, :type => Planet) + jupiter.save.should be_true + jupiter.dirty?.should be_false + + jupiter.age = '1_000_000' + jupiter.attribute_dirty?(:age).should be_false + jupiter.dirty?.should be_false + end + + it "should track attributes" do + + # So attribute tracking is a feature of the Resource, + # not the Property. Properties are class-level declarations. + # Instance-level operations like this happen in Resource with methods + # and ivars it sets up. Like a @dirty_attributes Array for example to + # track dirty attributes. 
+ + mars = Planet.new :name => 'Mars' + # #attribute_loaded? and #attribute_dirty? are a bit verbose, + # but I like the consistency and grouping of the methods. + + # initialize-set values are dirty as well. DM sets ivars + # directly when materializing, so an ivar won't exist + # if the value wasn't loaded by DM initially. Touching that + # ivar at all will declare it, so at that point it's loaded. + # This means #attribute_loaded?'s implementation could be very + # similar (if not identical) to: + # def attribute_loaded?(name) + # instance_variable_defined?("@#{name}") + # end + mars.attribute_loaded?(:name).should be_true + mars.attribute_dirty?(:id).should be_false + mars.attribute_dirty?(:name).should be_true + mars.attribute_loaded?(:age).should be_false + mars.attribute_dirty?(:data).should be_false + + mars.age.should be_nil + + # So accessing a value should ensure it's loaded. + # XXX: why? if the @ivar isn't set, which it wouldn't be in this + # case because mars is a new_record?, then perhaps it should return + # false + # mars.attribute_loaded?(:age).should be_true + + # A value should be able to be both loaded and nil. + mars.age.should be_nil + + # Unless you call #[]= it's not dirty. + mars.attribute_dirty?(:age).should be_false + + mars.age = 30 + mars.data = { :a => "Yeah!" } + + # Obviously. 
:-) + mars.attribute_dirty?(:age).should be_true + mars.attribute_dirty?(:data).should be_true + end + + it "should mark the key as dirty, if it is a natural key and has been set" do + phone = Phone.new + phone.name = 'iPhone' + phone.attribute_dirty?(:name).should be_true + end + + it 'should return the dirty attributes' do + pluto = Planet.new(:name => 'Pluto', :age => 500_000) + pluto.attribute_dirty?(:name).should be_true + pluto.attribute_dirty?(:age).should be_true + end + + it 'should overwite old dirty attributes with new ones' do + pluto = Planet.new(:name => 'Pluto', :age => 500_000) + pluto.dirty_attributes.size.should == 2 + pluto.attribute_dirty?(:name).should be_true + pluto.attribute_dirty?(:age).should be_true + pluto.name = "pluto" + pluto.dirty_attributes.size.should == 2 + pluto.attribute_dirty?(:name).should be_true + pluto.attribute_dirty?(:age).should be_true + end + + it 'should provide a key' do + Planet.new.should respond_to(:key) + end + + it 'should store and retrieve default values' do + Planet.property(:satellite_count, Integer, :default => 0) + # stupid example but it's reliable and works + Planet.property(:orbit_period, Float, :default => lambda { |r,p| p.name.to_s.length }) + earth = Planet.new(:name => 'Earth') + earth.satellite_count.should == 0 + earth.orbit_period.should == 12 + earth.satellite_count = 2 + earth.satellite_count.should == 2 + earth.orbit_period = 365.26 + earth.orbit_period.should == 365.26 + end + + describe "#reload_attributes" do + it 'should call collection.reload if not a new record' do + planet = Planet.new(:name => 'Omicron Persei VIII') + planet.stub!(:new_record?).and_return(false) + + collection = mock('collection') + collection.should_receive(:reload).with(:fields => [:name]).once + + planet.stub!(:collection).and_return(collection) + planet.reload_attributes(:name) + end + + it 'should not call collection.reload if no attributes are provided to reload' do + planet = Planet.new(:name => 'Omicron Persei 
VIII') + planet.stub!(:new_record?).and_return(false) + + collection = mock('collection') + collection.should_not_receive(:reload) + + planet.stub!(:collection).and_return(collection) + planet.reload_attributes + end + + it 'should not call collection.reload if the record is new' do + lambda { + Planet.new(:name => 'Omicron Persei VIII').reload_attributes(:name) + }.should_not raise_error + + planet = Planet.new(:name => 'Omicron Persei VIII') + planet.should_not_receive(:collection) + planet.reload_attributes(:name) + end + end + + describe '#reload' do + it 'should call #reload_attributes with the currently loaded attributes' do + planet = Planet.new(:name => 'Omicron Persei VIII', :age => 1) + planet.stub!(:new_record?).and_return(false) + + planet.should_receive(:reload_attributes).with(:name, :age).once + + planet.reload + end + + it 'should call #reload on the parent and child associations' do + planet = Planet.new(:name => 'Omicron Persei VIII', :age => 1) + planet.stub!(:new_record?).and_return(false) + + child_association = mock('child assoc') + child_association.should_receive(:reload).once.and_return(true) + + parent_association = mock('parent assoc') + parent_association.should_receive(:reload).once.and_return(true) + + planet.stub!(:child_associations).and_return([child_association]) + planet.stub!(:parent_associations).and_return([parent_association]) + planet.stub!(:reload_attributes).and_return(planet) + + planet.reload + end + + it 'should not do anything if the record is new' do + planet = Planet.new(:name => 'Omicron Persei VIII', :age => 1) + planet.should_not_receive(:reload_attributes) + planet.reload + end + end + + describe 'when retrieving by key' do + it 'should return the corresponding object' do + m = mock("planet") + Planet.should_receive(:get).with(1).and_return(m) + + Planet.get!(1).should == m + end + + it 'should raise an error if not found' do + Planet.should_receive(:get).and_return(nil) + + lambda do + Planet.get!(1) + end.should 
raise_error(DataMapper::ObjectNotFoundError) + end + end + + describe "inheritance" do + before(:all) do + class Media + include DataMapper::Resource + + storage_names[:default] = 'media' + storage_names[:west_coast] = 'm3d1a' + + property :name, String, :key => true + end + + class NewsPaper < Media + + storage_names[:east_coast] = 'mother' + + property :rating, Integer + end + end + + it 'should inherit storage_names' do + NewsPaper.storage_name(:default).should == 'media' + NewsPaper.storage_name(:west_coast).should == 'm3d1a' + NewsPaper.storage_name(:east_coast).should == 'mother' + Media.storage_name(:east_coast).should == 'medium' + end + + it 'should inherit properties' do + Media.properties.should have(1).entries + NewsPaper.properties.should have(2).entries + end + end + + describe "Single-table Inheritance" do + before(:all) do + class Plant + include DataMapper::Resource + + property :id, Integer, :key => true + property :length, Integer + + def calculate(int) + int ** 2 + end + + def length=(len) + attribute_set(:length, calculate(len)) + end + end + + class HousePlant < Plant + def calculate(int) + int ** 3 + end + end + + class PoisonIvy < Plant + def length=(len) + attribute_set(:length, len - 1) + end + end + end + + it "should be able to overwrite getters" do + @p = Plant.new + @p.length = 3 + @p.length.should == 9 + end + + it "should pick overwritten methods" do + @hp = HousePlant.new + @hp.length = 3 + @hp.length.should == 27 + end + + it "should pick overwritten setters" do + @pi = PoisonIvy.new + @pi.length = 3 + @pi.length.should == 2 + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/scope_spec.rb b/vendor/dm-core-0.9.6/spec/unit/scope_spec.rb new file mode 100644 index 0000000..a2db2bf --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/scope_spec.rb @@ -0,0 +1,142 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Scope do + after do + Article.publicize_methods do + 
Article.scope_stack.clear # reset the stack before each spec + end + end + + describe '.with_scope' do + it 'should be protected' do + klass = class << Article; self; end + klass.should be_protected_method_defined(:with_scope) + end + + it 'should set the current scope for the block when given a Hash' do + Article.publicize_methods do + Article.with_scope :blog_id => 1 do + Article.query.should == DataMapper::Query.new(repository(:mock), Article, :blog_id => 1) + end + end + end + + it 'should set the current scope for the block when given a DataMapper::Query' do + Article.publicize_methods do + Article.with_scope query = DataMapper::Query.new(repository(:mock), Article) do + Article.query.should == query + end + end + end + + it 'should set the current scope for an inner block, merged with the outer scope' do + Article.publicize_methods do + Article.with_scope :blog_id => 1 do + Article.with_scope :author => 'dkubb' do + Article.query.should == DataMapper::Query.new(repository(:mock), Article, :blog_id => 1, :author => 'dkubb') + end + end + end + end + + it 'should reset the stack on error' do + Article.publicize_methods do + Article.query.should be_nil + lambda { + Article.with_scope(:blog_id => 1) { raise 'There was a problem!' 
} + }.should raise_error(RuntimeError) + Article.query.should be_nil + end + end + end + + describe '.with_exclusive_scope' do + it 'should be protected' do + klass = class << Article; self; end + klass.should be_protected_method_defined(:with_exclusive_scope) + end + + it 'should set the current scope for an inner block, ignoring the outer scope' do + Article.publicize_methods do + Article.with_scope :blog_id => 1 do + Article.with_exclusive_scope :author => 'dkubb' do + Article.query.should == DataMapper::Query.new(repository(:mock), Article, :author => 'dkubb') + end + end + end + end + + it 'should reset the stack on error' do + Article.publicize_methods do + Article.query.should be_nil + lambda { + Article.with_exclusive_scope(:blog_id => 1) { raise 'There was a problem!' } + }.should raise_error(RuntimeError) + Article.query.should be_nil + end + end + + it "should ignore the default_scope when using an exclusive scope" do + Article.default_scope.update(:blog_id => 1) + Article.publicize_methods do + Article.with_exclusive_scope(:author => 'dkubb') do + Article.query.should == DataMapper::Query.new(repository(:mock), Article, :author => 'dkubb') + end + end + Article.default_scope.delete(:blog_id) + end + + end + + describe '.scope_stack' do + it 'should be private' do + klass = class << Article; self; end + klass.should be_private_method_defined(:scope_stack) + end + + it 'should provide an Array' do + Article.publicize_methods do + Article.scope_stack.should be_kind_of(Array) + end + end + + it 'should be the same in a thread' do + Article.publicize_methods do + Article.scope_stack.object_id.should == Article.scope_stack.object_id + end + end + + it 'should be different in each thread' do + Article.publicize_methods do + a = Thread.new { Article.scope_stack } + b = Thread.new { Article.scope_stack } + + a.value.object_id.should_not == b.value.object_id + end + end + end + + describe '.query' do + it 'should be public' do + klass = class << Article; self; 
end + klass.should be_public_method_defined(:query) + end + + it 'should return nil if the scope stack is empty' do + Article.publicize_methods do + Article.scope_stack.should be_empty + Article.query.should be_nil + end + end + + it 'should return the last element of the scope stack' do + Article.publicize_methods do + query = DataMapper::Query.new(repository(:mock), Article) + Article.scope_stack << query + Article.query.object_id.should == query.object_id + end + end + end + + # TODO: specify the behavior of finders (all, first, get, []) when scope is in effect +end diff --git a/vendor/dm-core-0.9.6/spec/unit/transaction_spec.rb b/vendor/dm-core-0.9.6/spec/unit/transaction_spec.rb new file mode 100644 index 0000000..241bcb2 --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/transaction_spec.rb @@ -0,0 +1,493 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Transaction do + + before :all do + class Smurf + include DataMapper::Resource + property :id, Integer, :key => true + end + end + + before :each do + @adapter = mock("adapter", :name => 'mock_adapter') + @repository = mock("repository") + @repository_adapter = mock("repository adapter", :name => 'mock_repository_adapter') + @resource = Smurf.new + @transaction_primitive = mock("transaction primitive") + @repository_transaction_primitive = mock("repository transaction primitive") + @array = [@adapter, @repository] + + @adapter.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(true) + @adapter.should_receive(:transaction_primitive).any_number_of_times.and_return(@transaction_primitive) + @repository.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + @repository.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(false) + 
@repository.should_receive(:is_a?).any_number_of_times.with(DataMapper::Repository).and_return(true) + @repository.should_receive(:adapter).any_number_of_times.and_return(@repository_adapter) + @repository_adapter.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + @repository_adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(true) + @repository_adapter.should_receive(:transaction_primitive).any_number_of_times.and_return(@repository_transaction_primitive) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:close).and_return(true) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:begin).and_return(true) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:prepare).and_return(true) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:rollback).and_return(true) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:rollback_prepared).and_return(true) + @transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:commit).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:close).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:begin).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:prepare).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:rollback).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:rollback_prepared).and_return(true) + @repository_transaction_primitive.should_receive(:respond_to?).any_number_of_times.with(:commit).and_return(true) + end + + it "should be able to initialize with an Array" do + DataMapper::Transaction.new(@array) + end + it 
"should be able to initialize with DataMapper::Adapters::AbstractAdapters" do + DataMapper::Transaction.new(@adapter) + end + it "should be able to initialize with DataMapper::Repositories" do + DataMapper::Transaction.new(@repository) + end + it "should be able to initialize with DataMapper::Resource subclasses" do + DataMapper::Transaction.new(Smurf) + end + it "should be able to initialize with DataMapper::Resources" do + DataMapper::Transaction.new(Smurf.new) + end + it "should initialize with no transaction_primitives" do + DataMapper::Transaction.new.transaction_primitives.empty?.should == true + end + it "should initialize with state :none" do + DataMapper::Transaction.new.state.should == :none + end + it "should initialize the adapters given on creation" do + DataMapper::Transaction.new(Smurf).adapters.should == {Smurf.repository.adapter => :none} + end + it "should be able receive multiple adapters on creation" do + DataMapper::Transaction.new(Smurf, @resource, @adapter, @repository) + end + it "should be able to initialize with a block" do + p = Proc.new do end + @transaction_primitive.stub!(:begin) + @transaction_primitive.stub!(:prepare) + @transaction_primitive.stub!(:commit) + @adapter.stub!(:push_transaction) + @adapter.stub!(:pop_transaction) + @transaction_primitive.stub!(:close) + DataMapper::Transaction.new(@adapter, &p) + end + it "should accept new adapters after creation" do + t = DataMapper::Transaction.new(@adapter, @repository) + t.adapters.should == {@adapter => :none, @repository_adapter => :none} + t.link(@resource) + t.adapters.should == {@adapter => :none, @repository_adapter => :none, Smurf.repository.adapter => :none} + end + it "should not accept new adapters after state is changed" do + t = DataMapper::Transaction.new(@adapter, @repository) + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + t.begin + lambda do t.link(@resource) end.should raise_error(Exception, /Illegal state/) + end + 
describe "#begin" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + end + it "should raise error if state is changed" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction.begin + lambda do @transaction.begin end.should raise_error(Exception, /Illegal state/) + end + it "should try to connect each adapter (or log fatal error), then begin each adapter (or rollback and close)" do + @transaction.should_receive(:each_adapter).with(:connect_adapter, [:log_fatal_transaction_breakage]) + @transaction.should_receive(:each_adapter).with(:begin_adapter, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + @transaction.begin + end + it "should leave with state :begin" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction.begin + @transaction.state.should == :begin + end + end + describe "#rollback" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + end + it "should raise error if state is :none" do + lambda do @transaction.rollback end.should raise_error(Exception, /Illegal state/) + end + it "should raise error if state is :commit" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction_primitive.stub!(:prepare) + @repository_transaction_primitive.stub!(:prepare) + @transaction_primitive.stub!(:commit) + @repository_transaction_primitive.stub!(:commit) + @transaction_primitive.stub!(:close) + @repository_transaction_primitive.stub!(:close) + @transaction.begin + @transaction.commit + lambda do @transaction.rollback end.should raise_error(Exception, /Illegal state/) + end + it "should try to rollback each adapter (or rollback and close), then then close (or log fatal error)" do + @transaction.should_receive(:each_adapter).with(:connect_adapter, [:log_fatal_transaction_breakage]) + 
@transaction.should_receive(:each_adapter).with(:begin_adapter, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + @transaction.should_receive(:each_adapter).with(:rollback_adapter_if_begin, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + @transaction.should_receive(:each_adapter).with(:close_adapter_if_open, [:log_fatal_transaction_breakage]) + @transaction.should_receive(:each_adapter).with(:rollback_prepared_adapter_if_prepare, [:rollback_prepared_and_close_adapter_if_begin, :close_adapter_if_none]) + @transaction.begin + @transaction.rollback + end + it "should leave with state :rollback" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction_primitive.stub!(:rollback) + @repository_transaction_primitive.stub!(:rollback) + @transaction_primitive.stub!(:close) + @repository_transaction_primitive.stub!(:close) + @transaction.begin + @transaction.rollback + @transaction.state.should == :rollback + end + end + describe "#commit" do + describe "without a block" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + end + it "should raise error if state is :none" do + lambda do @transaction.commit end.should raise_error(Exception, /Illegal state/) + end + it "should raise error if state is :commit" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction_primitive.stub!(:prepare) + @repository_transaction_primitive.stub!(:prepare) + @transaction_primitive.stub!(:commit) + @repository_transaction_primitive.stub!(:commit) + @transaction_primitive.stub!(:close) + @repository_transaction_primitive.stub!(:close) + @transaction.begin + @transaction.commit + lambda do @transaction.commit end.should raise_error(Exception, /Illegal state/) + end + it "should raise error if state is :rollback" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + 
@transaction_primitive.stub!(:rollback) + @repository_transaction_primitive.stub!(:rollback) + @transaction_primitive.stub!(:close) + @repository_transaction_primitive.stub!(:close) + @transaction.begin + @transaction.rollback + lambda do @transaction.commit end.should raise_error(Exception, /Illegal state/) + end + it "should try to prepare each adapter (or rollback and close), then commit each adapter (or log fatal error), then close (or log fatal error)" do + @transaction.should_receive(:each_adapter).with(:connect_adapter, [:log_fatal_transaction_breakage]) + @transaction.should_receive(:each_adapter).with(:begin_adapter, [:rollback_and_close_adapter_if_begin, :close_adapter_if_none]) + @transaction.should_receive(:each_adapter).with(:prepare_adapter, [:rollback_and_close_adapter_if_begin, :rollback_prepared_and_close_adapter_if_prepare]) + @transaction.should_receive(:each_adapter).with(:commit_adapter, [:log_fatal_transaction_breakage]) + @transaction.should_receive(:each_adapter).with(:close_adapter, [:log_fatal_transaction_breakage]) + @transaction.begin + @transaction.commit + end + it "should leave with state :commit" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction_primitive.stub!(:prepare) + @repository_transaction_primitive.stub!(:prepare) + @transaction_primitive.stub!(:commit) + @repository_transaction_primitive.stub!(:commit) + @transaction_primitive.stub!(:close) + @repository_transaction_primitive.stub!(:close) + @transaction.begin + @transaction.commit + @transaction.state.should == :commit + end + end + describe "with a block" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + end + it "should raise if state is not :none" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction.begin + lambda do @transaction.commit do end end.should raise_error(Exception, /Illegal state/) + end + it "should begin, 
yield and commit if the block raises no exception" do + @repository_transaction_primitive.should_receive(:begin) + @repository_transaction_primitive.should_receive(:prepare) + @repository_transaction_primitive.should_receive(:commit) + @repository_transaction_primitive.should_receive(:close) + @transaction_primitive.should_receive(:begin) + @transaction_primitive.should_receive(:prepare) + @transaction_primitive.should_receive(:commit) + @transaction_primitive.should_receive(:close) + p = Proc.new do end + @transaction.should_receive(:within).with(&p) + @transaction.commit(&p) + end + it "should rollback if the block raises an exception" do + @repository_transaction_primitive.should_receive(:begin) + @repository_transaction_primitive.should_receive(:rollback) + @repository_transaction_primitive.should_receive(:close) + @transaction_primitive.should_receive(:begin) + @transaction_primitive.should_receive(:rollback) + @transaction_primitive.should_receive(:close) + p = Proc.new do raise "test exception, never mind me" end + @transaction.should_receive(:within).with(&p) + lambda do @transaction.commit(&p) end.should raise_error(Exception, /test exception, never mind me/) + end + end + end + describe "#within" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + end + it "should raise if no block is provided" do + lambda do @transaction.within end.should raise_error(Exception, /No block/) + end + it "should raise if state is not :begin" do + lambda do @transaction.within do end end.should raise_error(Exception, /Illegal state/) + end + it "should push itself on the per thread transaction context of each adapter and then pop itself out again" do + @repository_transaction_primitive.should_receive(:begin) + @transaction_primitive.should_receive(:begin) + @repository_adapter.should_receive(:push_transaction).with(@transaction) + @adapter.should_receive(:push_transaction).with(@transaction) + 
@repository_adapter.should_receive(:pop_transaction) + @adapter.should_receive(:pop_transaction) + @transaction.begin + @transaction.within do end + end + it "should push itself on the per thread transaction context of each adapter and then pop itself out again even if an exception was raised" do + @repository_transaction_primitive.should_receive(:begin) + @transaction_primitive.should_receive(:begin) + @repository_adapter.should_receive(:push_transaction).with(@transaction) + @adapter.should_receive(:push_transaction).with(@transaction) + @repository_adapter.should_receive(:pop_transaction) + @adapter.should_receive(:pop_transaction) + @transaction.begin + lambda do @transaction.within do raise "test exception, never mind me" end end.should raise_error(Exception, /test exception, never mind me/) + end + end + describe "#method_missing" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::AnyArgsConstraint).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::NoArgsConstraint).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(Regexp).and_return(false) + end + it "should delegate calls to [a method we have]_if_[state](adapter) to [a method we have](adapter) if state of adapter is [state]" do + @transaction.should_receive(:state_for).with(@adapter).and_return(:begin) + @transaction.should_receive(:connect_adapter).with(@adapter) + @transaction.connect_adapter_if_begin(@adapter) + end + it "should not delegate calls to [a method we have]_if_[state](adapter) to [a method we have](adapter) if state of adapter is not [state]" do + @transaction.should_receive(:state_for).with(@adapter).and_return(:commit) + @transaction.should_not_receive(:connect_adapter).with(@adapter) + @transaction.connect_adapter_if_begin(@adapter) + end + it "should delegate calls to [a method we have]_unless_[state](adapter) to 
[a method we have](adapter) if state of adapter is not [state]" do + @transaction.should_receive(:state_for).with(@adapter).and_return(:none) + @transaction.should_receive(:connect_adapter).with(@adapter) + @transaction.connect_adapter_unless_begin(@adapter) + end + it "should not delegate calls to [a method we have]_unless_[state](adapter) to [a method we have](adapter) if state of adapter is [state]" do + @transaction.should_receive(:state_for).with(@adapter).and_return(:begin) + @transaction.should_not_receive(:connect_adapter).with(@adapter) + @transaction.connect_adapter_unless_begin(@adapter) + end + it "should not delegate calls whose first argument is not a DataMapper::Adapters::AbstractAdapter" do + lambda do @transaction.connect_adapter_unless_begin("plur") end.should raise_error + end + it "should not delegate calls that do not look like an if or unless followed by a state" do + lambda do @transaction.connect_adapter_unless_hepp(@adapter) end.should raise_error + lambda do @transaction.connect_adapter_when_begin(@adapter) end.should raise_error + end + it "should not delegate calls that we can not respond to" do + lambda do @transaction.connect_adapters_unless_begin(@adapter) end.should raise_error + lambda do @transaction.connect_adapters_if_begin(@adapter) end.should raise_error + end + end + it "should be able to produce the connection for an adapter" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction = DataMapper::Transaction.new(@adapter, @repository) + @transaction.begin + @transaction.primitive_for(@adapter).should == @transaction_primitive + end + describe "#each_adapter" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::AnyArgsConstraint).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::NoArgsConstraint).and_return(false) + 
@adapter.should_receive(:is_a?).any_number_of_times.with(Regexp).and_return(false) + @repository_adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::AnyArgsConstraint).and_return(false) + @repository_adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::NoArgsConstraint).and_return(false) + @repository_adapter.should_receive(:is_a?).any_number_of_times.with(Regexp).and_return(false) + end + it "should send the first argument to itself once for each adapter" do + @transaction.should_receive(:plupp).with(@adapter) + @transaction.should_receive(:plupp).with(@repository_adapter) + @transaction.instance_eval do each_adapter(:plupp, [:plur]) end + end + it "should stop sending if any call raises an exception, then send each element of the second argument to itself with each adapter as argument" do + a1 = @repository_adapter + a2 = @adapter + @transaction.adapters.instance_eval do + @a1 = a1 + @a2 = a2 + def each(&block) + yield(@a1, :none) + yield(@a2, :none) + end + end + @transaction.should_receive(:plupp).with(@repository_adapter).and_throw(Exception.new("test error - dont mind me")) + @transaction.should_not_receive(:plupp).with(@adapter) + @transaction.should_receive(:plur).with(@adapter) + @transaction.should_receive(:plur).with(@repository_adapter) + lambda do @transaction.instance_eval do each_adapter(:plupp, [:plur]) end end.should raise_error(Exception, /test error - dont mind me/) + end + it "should send each element of the second argument to itself with each adapter as argument even if exceptions occur in the process" do + a1 = @repository_adapter + a2 = @adapter + @transaction.adapters.instance_eval do + @a1 = a1 + @a2 = a2 + def each(&block) + yield(@a1, :none) + yield(@a2, :none) + end + end + @transaction.should_receive(:plupp).with(@repository_adapter).and_throw(Exception.new("test error - dont mind me")) + @transaction.should_not_receive(:plupp).with(@adapter) + 
@transaction.should_receive(:plur).with(@adapter).and_throw(Exception.new("another test error")) + @transaction.should_receive(:plur).with(@repository_adapter).and_throw(Exception.new("yet another error")) + lambda do @transaction.instance_eval do each_adapter(:plupp, [:plur]) end end.should raise_error(Exception, /test error - dont mind me/) + end + end + it "should be able to return the state for a given adapter" do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + a1 = @adapter + a2 = @repository_adapter + @transaction.instance_eval do state_for(a1) end.should == :none + @transaction.instance_eval do state_for(a2) end.should == :none + @transaction.instance_eval do @adapters[a1] = :begin end + @transaction.instance_eval do state_for(a1) end.should == :begin + @transaction.instance_eval do state_for(a2) end.should == :none + end + describe "#do_adapter" do + before :each do + @transaction = DataMapper::Transaction.new(@adapter, @repository) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::AnyArgsConstraint).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::NoArgsConstraint).and_return(false) + @adapter.should_receive(:is_a?).any_number_of_times.with(Regexp).and_return(false) + end + it "should raise if there is no connection for the adapter" do + a1 = @adapter + lambda do @transaction.instance_eval do do_adapter(a1, :ping, :pong) end end.should raise_error(Exception, /No primitive/) + end + it "should raise if the adapter has the wrong state" do + @transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction.begin + a1 = @adapter + @adapter.should_not_receive(:ping_transaction).with(@transaction) + lambda do @transaction.instance_eval do do_adapter(a1, :ping, :pong) end end.should raise_error(Exception, /Illegal state/) + end + it "should delegate to the adapter if the connection exists and we have the right state" do + 
@transaction_primitive.stub!(:begin) + @repository_transaction_primitive.stub!(:begin) + @transaction.begin + a1 = @adapter + @transaction_primitive.should_receive(:ping) + @transaction.instance_eval do do_adapter(a1, :ping, :begin) end + end + end + describe "#connect_adapter" do + before :each do + @other_adapter = mock("adapter") + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + @other_adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(true) + @transaction = DataMapper::Transaction.new(@other_adapter) + end + it "should be able to connect an adapter" do + a1 = @other_adapter + @other_adapter.should_receive(:transaction_primitive).and_return(@transaction_primitive) + @transaction.instance_eval do connect_adapter(a1) end + @transaction.transaction_primitives[@other_adapter].should == @transaction_primitive + end + end + describe "#close adapter" do + before :each do + @other_adapter = mock("adapter") + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + @other_adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(true) + @transaction = DataMapper::Transaction.new(@other_adapter) + end + it "should be able to close the connection of an adapter" do + a1 = @other_adapter + @transaction_primitive.should_receive(:close) + @other_adapter.should_receive(:transaction_primitive).and_return(@transaction_primitive) + @transaction.instance_eval do connect_adapter(a1) end + @transaction.transaction_primitives[@other_adapter].should == @transaction_primitive + @transaction.instance_eval do close_adapter(a1) end + @transaction.transaction_primitives[@other_adapter].should == nil + end + end + describe "the transaction operation methods" do + before :each do + @other_adapter = mock("adapter") + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Array).and_return(false) + 
@other_adapter.should_receive(:is_a?).any_number_of_times.with(DataMapper::Adapters::AbstractAdapter).and_return(true) + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::AnyArgsConstraint).and_return(false) + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Spec::Mocks::NoArgsConstraint).and_return(false) + @other_adapter.should_receive(:is_a?).any_number_of_times.with(Regexp).and_return(false) + @transaction = DataMapper::Transaction.new(@other_adapter) + end + it "should only allow adapters in state :none to begin" do + a1 = @other_adapter + @transaction.should_receive(:do_adapter).with(@other_adapter, :begin, :none) + @transaction.instance_eval do begin_adapter(a1) end + end + it "should only allow adapters in state :begin to prepare" do + a1 = @other_adapter + @transaction.should_receive(:do_adapter).with(@other_adapter, :prepare, :begin) + @transaction.instance_eval do prepare_adapter(a1) end + end + it "should only allow adapters in state :prepare to commit" do + a1 = @other_adapter + @transaction.should_receive(:do_adapter).with(@other_adapter, :commit, :prepare) + @transaction.instance_eval do commit_adapter(a1) end + end + it "should only allow adapters in state :begin to rollback" do + a1 = @other_adapter + @transaction.should_receive(:do_adapter).with(@other_adapter, :rollback, :begin) + @transaction.instance_eval do rollback_adapter(a1) end + end + it "should only allow adapters in state :prepare to rollback_prepared" do + a1 = @other_adapter + @transaction.should_receive(:do_adapter).with(@other_adapter, :rollback_prepared, :prepare) + @transaction.instance_eval do rollback_prepared_adapter(a1) end + end + it "should do delegate properly for rollback_and_close" do + a1 = @other_adapter + @transaction.should_receive(:rollback_adapter).with(@other_adapter) + @transaction.should_receive(:close_adapter).with(@other_adapter) + @transaction.instance_eval do rollback_and_close_adapter(a1) end + end + it "should do 
delegate properly for rollback_prepared_and_close" do + a1 = @other_adapter + @transaction.should_receive(:rollback_prepared_adapter).with(@other_adapter) + @transaction.should_receive(:close_adapter).with(@other_adapter) + @transaction.instance_eval do rollback_prepared_and_close_adapter(a1) end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/type_map_spec.rb b/vendor/dm-core-0.9.6/spec/unit/type_map_spec.rb new file mode 100644 index 0000000..e762e2c --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/type_map_spec.rb @@ -0,0 +1,114 @@ +require 'pathname' +require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper' + +describe DataMapper::TypeMap do + + before(:each) do + @tm = DataMapper::TypeMap.new + end + + describe "#map" do + it "should return a type map chain" do + @tm.map(String).should be_instance_of(DataMapper::TypeMap::TypeChain) + end + + it "should return the original chain if the type has already been mapped" do + tc = @tm.map(String) + @tm.map(String).should == tc + end + end + + describe "#lookup" do + it "should the primitive's mapping the class has a primitive type" do + @tm.map(String).to(:varchar) + + lambda { @tm.lookup(DM::Text) }.should_not raise_error + end + + it "should merge in the parent type map's translated match" do + + end + + describe "#lookup_from_map" do + it "should merge the translated type match into the parent match" do + @tm.map(String).to(:varchar) + + child = DataMapper::TypeMap.new(@tm) + child.map(String).with(:size => 100) + + child.lookup_from_map(String).should == {:primitive => :varchar, :size => 100} + end + end + + describe "#lookup_by_type" do + it "should raise an exception if the type is not mapped and does not have a primitive" do + klass = Class.new + lambda { @tm.lookup(klass) }.should raise_error("Type #{klass} must have a default primitive or type map entry") + end + end + end + + describe "#map" do + it "should create a new TypeChain if there is no match" do + @tm.chains.should_not 
have_key(String) + + DataMapper::TypeMap::TypeChain.should_receive(:new) + + @tm.map(String) + end + + it "should not create a new TypeChain if there is a match" do + @tm.map(String) + + DataMapper::TypeMap::TypeChain.should_not_receive(:new) + + @tm.map(String) + end + end + + describe DataMapper::TypeMap::TypeChain do + describe "#to" do + it "should be a setter for @primitive" do + tc = DataMapper::TypeMap::TypeChain.new + + lambda { tc.to(:primitive) }.should change { tc.primitive }.to(:primitive) + end + + it "should return itself" do + tc = DataMapper::TypeMap::TypeChain.new + + tc.to(:primitive).should == tc + end + end + + describe "#with" do + it "should return itself" do + tc = DataMapper::TypeMap::TypeChain.new + + tc.with(:key => :value).should == tc + end + + it "should raise an error if the argument is not a hash" do + tc = DataMapper::TypeMap::TypeChain.new + + lambda { tc.with(:key) }.should raise_error("method 'with' expects a hash") + end + + it "should merge the argument hash into the attributes hash" do + tc = DataMapper::TypeMap::TypeChain.new + + tc.with(:key => :value).with(:size => 10).attributes.should == {:key => :value, :size => 10} + end + end + + describe "#translate" do + it "should merge the attributes hash with the primitive value" do + DataMapper::TypeMap::TypeChain.new.to(:int).with(:size => 10).translate.should == {:primitive => :int, :size => 10} + end + + it "should overwrite any :primitive entry set using the 'with' method" do + DataMapper::TypeMap::TypeChain.new.to(:int).with(:primitive => :varchar).translate.should == {:primitive => :int} + end + end + end +end diff --git a/vendor/dm-core-0.9.6/spec/unit/type_spec.rb b/vendor/dm-core-0.9.6/spec/unit/type_spec.rb new file mode 100644 index 0000000..c8a59da --- /dev/null +++ b/vendor/dm-core-0.9.6/spec/unit/type_spec.rb @@ -0,0 +1,119 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper')) + +describe DataMapper::Type do + + before(:each) do + class 
TestType < DataMapper::Type + primitive String + size 10 + end + + class TestType2 < DataMapper::Type + primitive String + size 10 + + def self.load(value, property) + value.reverse + end + + def self.dump(value, property) + value.reverse + end + end + + class TestResource + include DataMapper::Resource + end + + class TestType3 < DataMapper::Type + primitive String + size 10 + attr_accessor :property, :value + + def self.load(value, property) + type = self.new + type.property = property + type.value = value + type + end + + def self.dump(value, property) + value.value + end + end + end + + it "should have the same PROPERTY_OPTIONS array as DataMapper::Property" do + DataMapper::Type::PROPERTY_OPTIONS.should == DataMapper::Property::PROPERTY_OPTIONS + end + + it "should create a new type based on String primitive" do + TestType.primitive.should == String + end + + it "should have size of 10" do + TestType.size.should == 10 + end + + it "should have options hash exactly equal to options specified in custom type" do + #ie. 
it should not include null elements + TestType.options.should == { :size => 10, :length => 10 } + end + + it "should have length aliased to size" do + TestType.length.should == TestType.size + end + + it "should pass through the value if load wasn't overridden" do + TestType.load("test", nil).should == "test" + end + + it "should pass through the value if dump wasn't overridden" do + TestType.dump("test", nil).should == "test" + end + + it "should not raise NotImplementedException if load was overridden" do + TestType2.dump("helo", nil).should == "oleh" + end + + it "should not raise NotImplementedException if dump was overridden" do + TestType2.load("oleh", nil).should == "helo" + end + + describe "using a custom type" do + before do + @property = DataMapper::Property.new TestResource, :name, TestType3, {} + end + + it "should return a object of the same type" do + TestType3.load("helo", @property).class.should == TestType3 + end + + it "should contain the property" do + TestType3.load("helo", @property).property.should == @property + end + + it "should contain the value" do + TestType3.load("helo", @property).value.should == "helo" + end + + it "should return the value" do + obj = TestType3.load("helo", @property) + TestType3.dump(obj, @property).should == "helo" + end + end + + describe "using def Type" do + before do + @class = Class.new(DataMapper::Type(String, :size => 20)) + end + + it "should be of the specified type" do + @class.primitive.should == String + end + + it "should have the right options set" do + @class.size.should == 20 + end + end +end diff --git a/vendor/dm-core-0.9.6/tasks/ci.rb b/vendor/dm-core-0.9.6/tasks/ci.rb new file mode 100644 index 0000000..da640bf --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/ci.rb @@ -0,0 +1,36 @@ +task 'ci:doc' => :doc + +namespace :ci do + + task :prepare do + rm_rf ROOT + "ci" + mkdir_p ROOT + "ci" + mkdir_p ROOT + "ci/doc" + mkdir_p ROOT + "ci/cyclomatic" + mkdir_p ROOT + "ci/token" + end + + 
Spec::Rake::SpecTask.new(:spec => :prepare) do |t| + t.spec_opts = ["--colour", "--format", "specdoc", "--format", "html:#{ROOT}/ci/rspec_report.html", "--diff"] + t.spec_files = Pathname.glob((ROOT + 'spec/**/*_spec.rb').to_s) + unless ENV['NO_RCOV'] + t.rcov = true + t.rcov_opts << '--exclude' << "spec,gems" + t.rcov_opts << '--text-summary' + t.rcov_opts << '--sort' << 'coverage' << '--sort-reverse' + t.rcov_opts << '--only-uncovered' + end + end + + task :saikuro => :prepare do + system "saikuro -c -i lib -y 0 -w 10 -e 15 -o ci/cyclomatic" + mv 'ci/cyclomatic/index_cyclo.html', 'ci/cyclomatic/index.html' + + system "saikuro -t -i lib -y 0 -w 20 -e 30 -o ci/token" + mv 'ci/token/index_token.html', 'ci/token/index.html' + end + +end + +#task :ci => %w[ ci:spec ci:doc ci:saikuro install ci:publish ] # yard-related tasks do not work yet +task :ci => %w[ ci:spec ] diff --git a/vendor/dm-core-0.9.6/tasks/dm.rb b/vendor/dm-core-0.9.6/tasks/dm.rb new file mode 100644 index 0000000..2e4bac2 --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/dm.rb @@ -0,0 +1,63 @@ +task :default => 'dm:spec' +task :spec => 'dm:spec' +task :rcov => 'dm:rcov' + +namespace :spec do + task :unit => 'dm:spec:unit' + task :integration => 'dm:spec:integration' +end + +namespace :rcov do + task :unit => 'dm:rcov:unit' + task :integration => 'dm:rcov:integration' +end + +namespace :dm do + def run_spec(name, files, rcov) + Spec::Rake::SpecTask.new(name) do |t| + t.spec_opts << '--colour' << '--loadby' << 'random' + t.spec_files = Pathname.glob(ENV['FILES'] || files.to_s) + t.rcov = rcov + t.rcov_opts << '--exclude' << 'spec,environment.rb' + t.rcov_opts << '--text-summary' + t.rcov_opts << '--sort' << 'coverage' << '--sort-reverse' + t.rcov_opts << '--only-uncovered' + end + end + + unit_specs = ROOT + 'spec/unit/**/*_spec.rb' + integration_specs = ROOT + 'spec/integration/**/*_spec.rb' + all_specs = ROOT + 'spec/**/*_spec.rb' + + desc "Run all specifications" + run_spec('spec', all_specs, false) + + 
desc "Run all specifications with rcov" + run_spec('rcov', all_specs, true) + + namespace :spec do + desc "Run unit specifications" + run_spec('unit', unit_specs, false) + + desc "Run integration specifications" + run_spec('integration', integration_specs, false) + end + + namespace :rcov do + desc "Run unit specifications with rcov" + run_spec('unit', unit_specs, true) + + desc "Run integration specifications with rcov" + run_spec('integration', integration_specs, true) + end + + desc "Run all comparisons with ActiveRecord" + task :perf do + sh ROOT + 'script/performance.rb' + end + + desc "Profile DataMapper" + task :profile do + sh ROOT + 'script/profile.rb' + end +end diff --git a/vendor/dm-core-0.9.6/tasks/doc.rb b/vendor/dm-core-0.9.6/tasks/doc.rb new file mode 100644 index 0000000..5b7fb64 --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/doc.rb @@ -0,0 +1,20 @@ +# when yard's ready, it'll have to come back, but for now... +Rake::RDocTask.new("doc") do |t| + t.rdoc_dir = 'doc' + t.title = "DataMapper - Ruby Object Relational Mapper" + t.options = ['--line-numbers', '--inline-source', '--all'] + t.rdoc_files.include("README.txt", "QUICKLINKS", "FAQ", "lib/**/**/*.rb") +end + +begin + gem 'yard', '>=0.2.1' + require 'yard' + + YARD::Rake::YardocTask.new("yardoc") do |t| + t.options << '--protected' + # t.options << '-q' + # t.files << '...anyglobshere...' + end +rescue Exception + # yard not installed +end diff --git a/vendor/dm-core-0.9.6/tasks/gemspec.rb b/vendor/dm-core-0.9.6/tasks/gemspec.rb new file mode 100644 index 0000000..5f00431 --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/gemspec.rb @@ -0,0 +1,23 @@ +desc "Generate gemspec" +task :gemspec do |x| + # Clean up extraneous files before checking manifest + %x[rake clean] + + # Check the manifest before generating the gemspec + manifest = %x[rake check_manifest] + manifest.gsub!("(in /usr/local/projects/dm/dm-core)\n", "") + + unless manifest.empty? 
+ print "\n", "#"*68, "\n" + print <<-EOS + Manifest.txt is not up-to-date. Please review the changes below. + If the changes are correct, run 'rake check_manifest | patch' + and then run this command again. +EOS + print "#"*68, "\n\n" + puts manifest + else + %x[rake debug_gem > #{GEM_NAME}.gemspec] + puts "Successfully created gemspec for #{GEM_NAME}!" + end +end diff --git a/vendor/dm-core-0.9.6/tasks/hoe.rb b/vendor/dm-core-0.9.6/tasks/hoe.rb new file mode 100644 index 0000000..7a35368 --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/hoe.rb @@ -0,0 +1,46 @@ +require 'hoe' + +@config_file = "~/.rubyforge/user-config.yml" +@config = nil +RUBYFORGE_USERNAME = "unknown" +def rubyforge_username + unless @config + begin + @config = YAML.load(File.read(File.expand_path(@config_file))) + rescue + puts <<-EOS +ERROR: No rubyforge config file found: #{@config_file} +Run 'rubyforge setup' to prepare your env for access to Rubyforge + - See http://newgem.rubyforge.org/rubyforge.html for more details + EOS + exit + end + end + RUBYFORGE_USERNAME.replace @config["username"] +end + +# Remove hoe dependency +class Hoe + def extra_dev_deps + @extra_dev_deps.reject! { |dep| dep[0] == "hoe" } + @extra_dev_deps + end +end + +hoe = Hoe.new(GEM_NAME, GEM_VERSION) do |p| + + p.developer(AUTHOR, EMAIL) + + p.description = PROJECT_DESCRIPTION + p.summary = PROJECT_SUMMARY + p.url = PROJECT_URL + + p.rubyforge_name = PROJECT_NAME if PROJECT_NAME + + p.clean_globs |= ["{coverage,doc,log,tmp}", "**/*.{log,db}", "profile_results.*", "**/.DS_Store"] + + GEM_DEPENDENCIES.each do |dep| + p.extra_deps << dep + end + +end diff --git a/vendor/dm-core-0.9.6/tasks/install.rb b/vendor/dm-core-0.9.6/tasks/install.rb new file mode 100644 index 0000000..24ca864 --- /dev/null +++ b/vendor/dm-core-0.9.6/tasks/install.rb @@ -0,0 +1,20 @@ +WIN32 = (RUBY_PLATFORM =~ /win32|mingw|bccwin|cygwin/) rescue nil +SUDO = WIN32 ? 
'' : ('sudo' unless ENV['SUDOLESS']) + +desc "Install #{GEM_NAME}" +if WIN32 + task :install => :gem do + system %{gem install --no-rdoc --no-ri -l pkg/#{GEM_NAME}-#{GEM_VERSION}.gem} + end + namespace :dev do + desc 'Install for development (for windows)' + task :winstall => :gem do + warn "You can now call 'rake install' instead of 'rake dev:winstall'." + system %{gem install --no-rdoc --no-ri -l pkg/#{GEM_NAME}-#{GEM_VERSION}.gem} + end + end +else + task :install => :package do + sh %{#{SUDO} gem install --local pkg/#{GEM_NAME}-#{GEM_VERSION}.gem} + end +end