diff --git a/README.rdoc b/README.rdoc index d3e6518ae2..9d1f04279e 100644 --- a/README.rdoc +++ b/README.rdoc @@ -105,7 +105,7 @@ geemus says: "That should give you everything you need to get started, but let m == Contributing -* Find something you would like to work on. For suggestions look for the `easy`, `medium` and `hard` tags in the {issues}[http://github.com/geemus/fog/issues] +* Find something you would like to work on. For suggestions look for the `easy`, `medium` and `hard` tags in the {issues}[http://github.com/fog/fog/issues] * Fork the project and do your work in a topic branch. * Add shindo tests to prove your code works and run all the tests using `bundle exec rake`. * Rebase your branch against geemus/fog to make sure everything is up to date. @@ -116,7 +116,7 @@ geemus says: "That should give you everything you need to get started, but let m Wonder how you can get a lovely fog shirt? Look no further! * Blue shirts go to people who have contributed indirectly, great examples are writing blog posts or giving lightning talks. -* Grey shirts and a follow from @fog go to people who have made it on to the {contributors list}[https://github.com/geemus/fog/contributors] by submitting code. +* Grey shirts and a follow from @fog go to people who have made it on to the {contributors list}[https://github.com/fog/fog/contributors] by submitting code. * Black shirts go to people who have made it on to the {collaborators list}[https://github.com/api/v2/json/repos/show/geemus/fog/collaborators] by coercing geemus into adding them. == Additional Resources diff --git a/Rakefile b/Rakefile index 5fb59fafa2..cdd5dc98e2 100644 --- a/Rakefile +++ b/Rakefile @@ -98,6 +98,7 @@ end task :nuke do Fog.providers.each do |provider| + next if ['Vmfusion'].include?(provider) begin compute = Fog::Compute.new(:provider => provider) for server in compute.servers @@ -106,6 +107,17 @@ task :nuke do end rescue end + begin + dns = Fog::DNS.new(:provider => provider) + for zone in dns.zones + for record in zone.records + record.destroy rescue nil + end + Formatador.display_line("[#{provider}] destroying zone #{zone.identity}") + zone.destroy rescue nil + end + rescue + end end end @@ -191,8 +203,10 @@ task :changelog do changelog << '' require 'multi_json' - github_data = MultiJson.decode(Excon.get('http://github.com/api/v2/json/repos/show/geemus/fog').body) - data = github_data['repository'].reject {|key, value| !['forks', 'open_issues', 'watchers'].include?(key)} + github_repo_data = MultiJson.decode(Excon.get('http://github.com/api/v2/json/repos/show/fog/fog').body) + data = github_repo_data['repository'].reject {|key, value| !['forks', 'open_issues', 'watchers'].include?(key)} + github_collaborator_data = MultiJson.decode(Excon.get('http://github.com/api/v2/json/repos/show/fog/fog/collaborators').body) + data['collaborators'] = github_collaborator_data['collaborators'].length rubygems_data = MultiJson.decode(Excon.get('https://rubygems.org/api/v1/gems/fog.json').body) data['downloads'] = rubygems_data['downloads'] stats = [] @@ -236,6 +250,7 @@ task :changelog do 'Lincoln Stoll', 'Luqman Amjad', 'nightshade427', + 'Patrick Debois', 'Wesley Beary' ].include?(committer) next diff --git a/changelog.txt b/changelog.txt index 9db85398fa..b0c50b8ab8 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,3 +1,437 @@ +1.0.0 09/29/2011 a81be08ef2473af91f16f4926e5b3dfa962a34ae +========================================================= + +Stats! 
{ 'collaborators' => 16, 'downloads' => 245745, 'forks' => 260, 'open_issues' => 13, 'watchers' => 1521 } + +MVP! Patrick Debois + +[Libvirt] + if transport is empty, ssh can't be enabled. thanks Patrick Debois + Enable to pass an libvirt_ip_command for looking up the mac -> ip_address . Using eval to allow for passing of mac address in ip_command. thanks Patrick Debois + corrected typo for appending string output to IO.popen. thanks Patrick Debois + initialize the ip_address as an empty string. thanks Patrick Debois + more specific error if the ip_command results in string that has no ip-address format. thanks Patrick Debois + Remove the newlines after running the local ip_command. thanks Patrick Debois + rename xml_desc to xml as an attribute and hide all non_dynamic attributes from fog console. thanks Patrick Debois + added blocked state and corrected crashed to shutoff state. thanks Patrick Debois + renamed 'raw' connection to raw in the Fog Connection. thanks Patrick Debois + +[Libvirt|Compute] + renamed all disk_ params for server creation to volume_ to make it consistent with the object type volume. thanks Patrick Debois + +[aws] + remove base64 require (duplicates require in fog/core). thanks geemus + +[aws/sqs] + Adding SQS mocking support. thanks Istvan Hoka + +[aws|acs] + Create ACS security_group model and collection. thanks Aaron Suggs + Improve security group tests. thanks Aaron Suggs + Adds ACS#delete_cache_security_group. thanks Benton Roberts + Added security group methods. thanks Benton Roberts + Update CacheSecurityGroup API to public beta 2011-07-15. thanks Benton Roberts + +[aws|cloudwatch] + Fix whitespace. thanks Jens Braeuer + +[aws|compute] + add snapshot method to volume model. thanks Andrei Serdeliuc + Correct path. thanks Dylan Egan + raise an ArgumentError if image_id is nil, otherwise an ugly InternalError is returned from AWS. thanks Dylan Egan + wait for ready before testing tags. thanks geemus + fixes for mocks tests. thanks geemus + fix formatting for mock security groups. thanks geemus + +[aws|dns] + fix parser path. thanks geemus + +[aws|elasticache] + refactor acs->elasticache. thanks Benton Roberts + refactor for whitespace / readability. thanks Benton Roberts + fix typo in Elasticache Security Group tests. thanks Benton Roberts + rename test file for shindo. thanks Benton Roberts + create and describe cache clusters. thanks Benton Roberts + delete cache clusters. thanks Benton Roberts + add Cache Cluster model and collection. thanks Benton Roberts + Fix bug in AWS::Elasticache::Cluster.get. thanks Benton Roberts + randomize cache cluster IDs in testing. thanks Benton Roberts + return nil on CacheClusterNotFound. thanks Benton Roberts + use Formatador for testing output. thanks Benton Roberts + move ClusterNotFound rescue code into Elasticache service definition. thanks Benton Roberts + change method profile for create_cache_cluster, delete_cache_cluster, and describe_cache_clusters. thanks Benton Roberts + change parameters for describe_cache_security_groups to ruby-friendly values. thanks Benton Roberts + remove port attribute from cluster model. thanks Benton Roberts + fix Elasticahce::Cluster.security_groups attribute. thanks Benton Roberts + implement modify_cache_cluster request. thanks Benton Roberts + cluster port number cannot be modified. thanks Benton Roberts + add cache node info to describe_cache_clusters. thanks Benton Roberts + add InvalidInstace error class. thanks Benton Roberts + remove optional parameters from Elasticache::Cluster. 
thanks Benton Roberts + show cluster node details by default in model. thanks Benton Roberts + add test for removing a cache node. thanks Benton Roberts + add pending_values to modified nodes. thanks Benton Roberts + implement RebootCacheCluster. thanks Benton Roberts + implement DescribeEvents. thanks Benton Roberts + implement basic parameter group requests. thanks Benton Roberts + implement describe_engine_default_parameters request. thanks Benton Roberts + implement Elasticache::ParameterGroup model and collection. thanks Benton Roberts + implement modify_cache_parameter_group request. thanks Benton Roberts + implement reset_cache_parameter_group request. thanks Benton Roberts + implement describe_cache_groups request. thanks Benton Roberts + test fix: change DESCRIBE_SECURITY_GROUPS helper format. thanks Benton Roberts + delete outdated test file. thanks Benton Roberts + +[aws|elb] + Raise a custom exception for Throttling. thanks Dylan Egan + wait_for server to be ready? before register. thanks geemus + +[aws|iam] + implement correct path behaviour in mocking. thanks Dylan Egan + +[aws|simpledb] + fix tests to use proper accessor. thanks geemus + +[aws|storage] + fix acl mocking. thanks geemus + +[bluebox|compute] + Fixed instance state. thanks Lee Huffman + Create and destroy images. thanks Lee Huffman + Fix for setting hostname on server save. thanks Lee Huffman + Expect correct status code for template create. thanks Lee Huffman + +[cdn|aws] + move aws cdn to its own shared area (namespacing should probably be corrected). thanks geemus + +[cdn|rackspace] + move rackspace cdn to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute] + fix service calls I missed in recent rearrange. thanks geemus + +[compute|aws] + - Change modify_instance_attribute name to match EC2 API method, and actually make it do something. thanks Caleb Tennis + Include ids of things we're modifying in requests. thanks Dan Peterson + Fix create_volume mock when creating from a snapshot. thanks Dan Peterson + Make get_bucket_location mock return LocationConstraint as API doc describes. thanks Dan Peterson + Fix associate_address mock to detach/revert previous addresses properly. thanks Dan Peterson + Don't warn in mock describe_snapshots if RestorableBy is 'self'. thanks Dan Peterson + When mocking, instances don't show up right away. thanks Dan Peterson + Suffix with _tests.rb. thanks Dylan Egan + IpPermissionsEgress is returned from AWS. thanks Dylan Egan + Simple test to verify revoke_group_and_owner behaviour. thanks Dylan Egan + Apparently passing a nil value works against live AWS. Only use SourceSecurityGroupOwnerId in mocks if supplied. thanks Dylan Egan + Since this is really proving the use of nil, let's just not pretend there's a value for owner_id. thanks Dylan Egan + sometimes the platform string is returned. thanks Dylan Egan + enable tests for mocked tags. thanks Dylan Egan + Fix NameError. thanks Jens Braeuer + Fix bug in tag mocking preventing servers from being updated with new tags. thanks Matt Griffin + Add mocking for describe_tags. thanks Matt Griffin + move aws compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|bluebox] + move bluebox compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|brightbox] + Allow persistent option to be passed to Brightbox::Compute. thanks Caius Durling + Updated test for new behaviour. 
thanks Paul Thornthwaite + Picking up more attributes from Account. thanks Paul Thornthwaite + No need to hardcode a server type. thanks Paul Thornthwaite + Updated and reordered model attributes. thanks Paul Thornthwaite + Updates to tests. thanks Paul Thornthwaite + Added resave warning to a few Brightbox models. thanks Paul Thornthwaite + Requests for server group management. thanks Paul Thornthwaite + Merge in test updates and server groups. thanks Paul Thornthwaite + Corrected require missed in update. thanks Paul Thornthwaite + Reset times to the correct type so not string attributes. thanks Paul Thornthwaite + Updated Format test to remove gone fields. thanks Paul Thornthwaite + Fixed typo in connection options. thanks Paul Thornthwaite + Added missing requests. thanks Paul Thornthwaite + Added requests for firewall management. thanks Paul Thornthwaite + Added ServerGroup model and collections. thanks Paul Thornthwaite + Passing options to server group update. thanks Paul Thornthwaite + Fixed server_groups.get. thanks Paul Thornthwaite + move brightbox compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|ecloud] + move ecloud compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|glesys] + added glesys as provider. thanks Anton Lindstrom + added tests. thanks Anton Lindström + fixed logical error for default values. thanks Anton Lindström + fixed an invalid character. thanks Anton Lindström + consistency/cleanup. thanks geemus + fix format for start vs stop. thanks geemus + rearrange to match current naming conventions. thanks geemus + +[compute|go_grid] + move go_grid compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|libvirt] + merge jedi4ever/libvirt. thanks geemus + move libvirt compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|linode] + move linode compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|new_servers] + move new_servers compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|ninefold] + move ninefold compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|rackspace] + move rackspace compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|slicehost] + move slicehost compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|storm_on_demand] + move storm_on_demand compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|vcloud] + move vcloud compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|virtual_box] + move virtual_box compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[compute|voxel] + move voxel compute to its own shared area (namespacing should probably be corrected). thanks geemus + +[core] + Allow FOG_CREDENTIAL env variable for config. thanks Aaron Suggs + add collection#destroy(identity). thanks geemus + move openssl to more central location. thanks geemus + first steps toward seperately requirable bits. thanks geemus + move providers to lib/fog/. thanks geemus + work toward separate requires. thanks geemus + prototype logger. thanks geemus + add get/set methods for logger channels. 
thanks geemus + use logger throughout for warnings. thanks geemus + coerce service credentials. thanks geemus + delete nil valued keys from config. thanks geemus + pass connection_options through service init. thanks geemus + don't rely on bin stuff for service init in tests. thanks geemus + dedup services listings. thanks geemus + more convenient accessors. thanks geemus + fixing more paths after rearrange. thanks geemus + add credentials setter. thanks geemus + make sure credentials tests properly reset after completion. thanks geemus + bump excon dep. thanks geemus + properly fix credentials tests. thanks geemus + skip vmfusion in rake nuke. thanks geemus + bump excon. thanks geemus + kill dns stuff in nuke as well. thanks geemus + +[dns] + update dns constructor to match recent file moves. thanks geemus + +[dns|aws] + move aws dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|bluebox] + move bluebox dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|dnsimple] + move dnsimple dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|dnsmadeeasy] + move dnsmadeeasy dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|dynect] + move dynect dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|linode] + move linode dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|rackspace] + initial commit. thanks Brian Hartsock + list_domains request. thanks Brian Hartsock + added attributes to list_domains; refactored rackspace errors to be shared with load balancers. thanks Brian Hartsock + move rackspace dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|slicehost] + move slicehost dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[dns|zerigo] + move zerigo dns to its own shared area (namespacing should probably be corrected). thanks geemus + +[doc] + Added blogpost about libvirt into fog to the press page. thanks Patrick Debois + corrected the link to the actual blogpost instead of the github markdown page :). thanks Patrick Debois + +[docs] + add note about ec2 default username. thanks geemus + +[dynect|dns] + use a string for now. #362 is open for accepting symbols in mocks. thanks Dylan Egan + return the zone name. thanks Dylan Egan + accidentally hardcoded the record type in the mocked data. thanks Dylan Egan + support ANY record results. thanks Dylan Egan + Don't use address as different records have different arguments, just send rdata. Remove value. Add CNAME test. thanks Dylan Egan + find, not first. thanks Dylan Egan + always ensure it's an integer. thanks Dylan Egan + retry if auth_token was previously set and error message includes possible login expiration. thanks Dylan Egan + support reauth for inactivity logout too. thanks Dylan Egan + +[glesys|compute] + fixes to play nice with mock tests. thanks geemus + skip flavor tests. thanks geemus + +[gleysys] + fixes for mocked test setup. thanks geemus + +[libvirt] + Added option libvirt_ip_command to the credentials error page. thanks Patrick Debois + Corrected template variable from interface_nat_network to network_nat_network. thanks Patrick Debois + +[linode|compute] + update format for plans. thanks geemus + +[load balancer|rackspace] + fixed some minor bugs i noticed in the tests. 
thanks Brian Hartsock + +[misc] + Fixed a couple of errors in the examples. thanks Bobby Wilson + Implement fog support for the Openstack Compute API v1.1. Includes support for legacy v1.0 style auth and v2.0 keystone auth. thanks Dan Prince + Add create_image to server model. thanks Dan Prince + Add support for non-strict validations, and nullable arrays/hashes. thanks Dan Prince + Additions and updates to the OpenStack API tests. thanks Dan Prince + Beginning of Dynect::DNS mocking support. thanks Dylan Egan + get_record, single. thanks Dylan Egan + Tidy up a bit. thanks Dylan Egan + Support freeze and thaw. thanks Dylan Egan + sleep for 3 seconds when running against Dynect because otherwise there is an operation still in progress. thanks Dylan Egan + raise a NotFound if not found. thanks Dylan Egan + Fog::DNS::Dynect, not Fog::Dynect::DNS. thanks Dylan Egan + InstanceId should have index according to AWS Docs. thanks E.J. Finneran + fix indenting to get markdown to recognise the code block properly. thanks Glenn Tweedie + Better URL escaping for Rackspace Cloud Files. thanks H. Wade Minter + Tweak to escape the Cloud Files filename before passing to public_url. thanks H. Wade Minter + Put escaping logic into the collection get_url call. thanks H. Wade Minter + (#9241) Add skeleton VMware vSphere platform support. thanks Jeff McCune + (#9241) Add SSL verification. thanks Jeff McCune + (#9241) Add current_time request. thanks Jeff McCune + (#9241) Add model for Fog::Compute[:vsphere].servers. thanks Jeff McCune + (#9241) Add test skeleton framework. thanks Jeff McCune + (#9241) Add ability to find VMs by UUID. thanks Jeff McCune + (#9421) Add start, stop, reboot server model methods. thanks Jeff McCune + (#9241) Add destroy API request and model action. thanks Jeff McCune + (#9241) Add find_template_by_instance_uuid request. thanks Jeff McCune + (#9241) Add vm_clone API request. thanks Jeff McCune + (#9241) Don't fail when trying to model a cloning VM. thanks Jeff McCune + (#9241) Make the reload action of the server models work. thanks Jeff McCune + (#9124) Add ability to reload the model of a cloning VM. thanks Jeff McCune + Refactor requests to return simple hashes and add unit tests. thanks Jeff McCune + Add vsphere_server connection attribute. thanks Jeff McCune + Fix vm clone problem when a Guid instance is passed as the instance_uuid. thanks Jeff McCune + Fix documentation. The resulting hash has no entry "PutScalingPolicyResponse", but a "...Result". thanks Jens Braeuer + Pass hostname to create_block request if provided. thanks Lee Huffman + Added Fog::CurrentMachine#ip_address. thanks Pan Thomakos + First cut at libvirt integration. Lots of features missing, but it proves the point. thanks Patrick Debois + - Added URI helper to parse libvirt URL's - exposed Libvirt original connection in Compute object - exposed URI in Compute object - added libvirt-ruby gem to the developer Gemspec. thanks Patrick Debois + - Get all pools now by name or by uuid - Create pool by providing xml - Destroy pool. thanks Patrick Debois + Added ability to create/destroy volumes You can search for volumes by path,key,name And list all volumes from a pool. thanks Patrick Debois + Allow creation of persistent or non persistent volumes. thanks Patrick Debois + Again major breakthrough. thanks Patrick Debois + Coming along nicely. 
thanks Patrick Debois + - ERB has a problem with a variable called type, it expands it on ruby 1.8 to .class - If the key or the volume is not found, maybe because the pool has not yet been started, the volumes should return nil. thanks Patrick Debois + Changed the monitoring command for IP addresses arpwatch.dat is not the correct place, it should be via syslog, or seperate file. thanks Patrick Debois + fixing whitespace. thanks Patrick Debois + removed trailing spaces. thanks Patrick Debois + indenting the files. thanks Patrick Debois + check ip-address that returned from the search in the logfile. thanks Patrick Debois + Added a way to locally retrieve the ipaddress through the ip_command More checks on correctness of ipaddress And checks on ssh failures. thanks Patrick Debois + renamed ipaddress to ip_address made the .id available and an alias to uuid for server. thanks Patrick Debois + Added description on the libvirt environment can be initialized and the requirements for ssh and ipaddress to work. thanks Patrick Debois + Added a global libvirt provider option ip_command to specify the ip_command Also more robust handling of connection error when the libvirt connection fails. thanks Patrick Debois + Remove the idea of template_options, now you specify the param directly in the create command. Unit and Size are now calculated. thanks Patrick Debois + Removed the template_options param. thanks Patrick Debois + Fixed disk_format_type vs disk_type_format difference and changed disk_format_type in the template as well. thanks Patrick Debois + added openauth support thanks to @rubiojr. thanks Patrick Debois + changed return code of state to string instead of symbols to be consistent with aws provider. thanks Patrick Debois + - Added concept of nodes (host of domains = node) - Renamed the shuttingdown to shutting-down mode - fixed the Gem warning on using Gem.find_by_name instead of Gem::Specification. thanks Patrick Debois + Added a way to filter the active and the defined servers(domains) using - servers.all(:active => false, :defined=> true). thanks Patrick Debois + Fixed empty filter issue, nil filter. thanks Patrick Debois + * Fixed an error with memory_size 256 that should be 256*1024 as the default is K nor M * Changed the ip_command to check the ipaddress to include changes not * only new IPaddresses. thanks Patrick Debois + Added libvirt options to credentials error. thanks Patrick Debois + Made libvirt username param consistent with other providers libvirt_user -> libvirt_username. thanks Patrick Debois + [Libvirt] Provided better solution for ip_command : use shell variable instead of ruby string for mac-address. thanks Patrick Debois + vmfusion provider , requires the fission gem (pull request pending). thanks Patrick Debois + fixed missing disk-> volume conversion. thanks Patrick Debois + another log entry style resused old ethernet. thanks Patrick Debois + Fix warning about whitespace before parentheses in dns.rb. thanks Rick Bradley + Added support fo canned ACLs in put_object_acl. thanks dblock + Updated put_bucket_acl to support canned ACLs. thanks dblock + Marking as pending. thanks dblock + Refactored specs, mocks, etc. thanks dblock + Revert "[core] make sure credentials tests properly reset after completion". thanks geemus + Update gemspec description to mention popular services that are supported. thanks watsonian + +[ninefold|compute] + fixes for list formats. thanks geemus + fix for network formats. thanks geemus + add default (ubuntu) image for servers. 
thanks geemus + +[rackspace|dns] + all important domains requests. thanks Brian Hartsock + zone models. thanks Brian Hartsock + records requests. thanks Brian Hartsock + record models. thanks Brian Hartsock + minor docs update. thanks Brian Hartsock + add mock initializer. thanks geemus + consistency fixes and tests and mark pending in mocked. thanks geemus + fix mock init to play nice with tests. thanks geemus + fixes for updates to beta. thanks geemus + +[rackspace|load_balancers] + fix path for tests. thanks geemus + fixes for tests. thanks geemus + +[rackspace|storage] + fix broken model paths. thanks geemus + +[release] + update MVP skip list. thanks geemus + add collaborator count to changelog stats. thanks geemus + +[storage] + Fixed what appeared to be broken test (I only verified with Rackspace provider). thanks Brian Hartsock + +[storage|aws] + Add options to File#copy method. thanks Aaron Suggs + move aws storage back with other aws stuff (namespacing should probably be recorrected as well). thanks geemus + +[storage|google] + move google storage to shared google stuff (namespacing should probably be corrected). thanks geemus + +[storage|local] + move local storage to its own shared area (namespacing should probably be corrected). thanks geemus + +[storage|ninefold] + move ninefold storage to its own shared area (namespacing should probably be corrected). thanks geemus + use Fog::HMAC. thanks geemus + +[storage|rackspace] + Fixed NotFound namespace. thanks Grégory Karékinian + move rackspace storage to its own shared area (namespacing should probably be corrected). thanks geemus + +[tests] + rearrange to match new lib structure. thanks geemus + mark not implemented mocks as pending. thanks geemus + more helpful formats helper errors. thanks geemus + +[vmfusion|compute] + fixed destroy function. thanks Patrick Debois + reworked structure as will be released in 0.4.0a. thanks Patrick Debois + +[vsphere|compute] + mark test requiring guid pending, as require can not be found. thanks geemus + remove unnecessary mocha require. thanks geemus + + 0.11.0 08/18/2011 73bcee507a4732e071c58d85793b7f307eb377dc ========================================================== diff --git a/docs/_layouts/default.html b/docs/_layouts/default.html index effe88a63f..4730aeb88f 100755 --- a/docs/_layouts/default.html +++ b/docs/_layouts/default.html @@ -35,32 +35,32 @@

[docs/_layouts/default.html hunk garbled in extraction: only text fragments remain — {{ page.title }}, the version (vX.Y.Z) / install (gem install fog) / source (geemus/fog) header metadata, {{ content }}, and the Services and About headings; the surrounding HTML markup and the hunk's +/- line structure were stripped and are not recoverable]
diff --git a/docs/about/contributing.markdown b/docs/about/contributing.markdown index 79803ea38f..7d3af6b2ec 100644 --- a/docs/about/contributing.markdown +++ b/docs/about/contributing.markdown @@ -86,7 +86,7 @@ end ## Tests -Now would be a good time to write some tests to make sure what you have written works (and will continue to). I've tried a couple variations on testing in the past, but have settled on consolidated lifetime testing. These vary enough that its hard to give a single simple example, but you can see many examples in "tests/compute/requests/aws":https://github.com/geemus/fog/tree/master/tests/compute/requests/aws/. +Now would be a good time to write some tests to make sure what you have written works (and will continue to). I've tried a couple variations on testing in the past, but have settled on consolidated lifetime testing. These vary enough that its hard to give a single simple example, but you can see many examples in "tests/compute/requests/aws":https://github.com/fog/fog/tree/master/tests/compute/requests/aws/. ### Highlights: * Reuse the same objects and take them through their whole life cycle (this is much faster, and most of the time if one portion fails the others would anyway). diff --git a/docs/about/getting_started.markdown b/docs/about/getting_started.markdown index 4fae741627..0e0b769cbe 100644 --- a/docs/about/getting_started.markdown +++ b/docs/about/getting_started.markdown @@ -24,7 +24,7 @@ Now in order to play with our data we need to setup a storage connection. storage = Fog::Storage.new({ :local_root => '~/fog', - :provider => 'Local', + :provider => 'Local' }) `storage` will now contain our storage object, configured to use the Local provider from our specified directory. @@ -33,7 +33,7 @@ Now in order to play with our data we need to setup a storage connection. Now that you have cleared the preliminaries you are ready to start storing data. Storage providers in fog segregate files into `directories` to make it easier to organize things. So lets create a directory so we can see that in action. - directory = Fog::Storage.directories.create( + directory = storage.directories.create( :key => 'data' ) diff --git a/docs/about/press.markdown b/docs/about/press.markdown index 9f03d797c0..b26275b624 100644 --- a/docs/about/press.markdown +++ b/docs/about/press.markdown @@ -5,6 +5,10 @@ title: Press Mentions and blog posts from elsewhere in reverse chronological order by day (and alphasorted for same days). +**September 13th, 2011** + +* [Libvirt support for fog](http://jedi.be/blog/2011/09/13/libvirt-fog-provider/) + **August 1st, 2011** * [Using EBS Snapshots with Fog](http://www.mediamolecule.com/lab/article/using_ebs_snapshots_with_fog/) diff --git a/docs/dns/index.markdown b/docs/dns/index.markdown index f136682f57..3de8cbf2e0 100644 --- a/docs/dns/index.markdown +++ b/docs/dns/index.markdown @@ -41,7 +41,7 @@ Now that you have a zone you will need to update your registrar to let them know With your new zone in hand you can add records as needed. First and foremost you will probably want the 'www' version of your site to point to whatever your ip might be: record = @zone.records.create( - :ip => '1.2.3.4', + :value => '1.2.3.4', :name => 'example.com', :type => 'A' ) @@ -49,7 +49,7 @@ With your new zone in hand you can add records as needed. 
First and foremost yo Adding other records is similarly easy, for instance if we want 'www.example.com' to go to the same place, we can use a cname record: record = @zone.records.create( - :ip => 'example.com', + :value => 'example.com', :name => 'www', :type => 'CNAME' ) @@ -57,7 +57,7 @@ Adding other records is similarly easy, for instance if we want 'www.example.com Or, similarly you might want to have your blog elsewhere: record = @zone.records.create( - :ip => '4.3.2.1', + :value => '4.3.2.1', :name => 'blog.example.com', :type => 'A' ) @@ -76,4 +76,4 @@ If you already have an account with another service you can just as easily use t ## Go Forth and Resolve -You can see an example of reusing code like this in the examples folder. Using this makes it easier to give yourself shortcuts to your cloud servers and manage how clients and users access them as well. It is great to have this flexibility so that you can modify your cloud infrastructure as needed while keeping everything ship shape. It also provides a nice way to create custom subdomains for users and just generally round out your cloud solution. +You can see an example of reusing code like this in the examples folder. Using this makes it easier to give yourself shortcuts to your cloud servers and manage how clients and users access them as well. It is great to have this flexibility so that you can modify your cloud infrastructure as needed while keeping everything ship shape. It also provides a nice way to create custom subdomains for users and just generally round out your cloud solution. diff --git a/docs/index.markdown b/docs/index.markdown index 727bd6a6a9..1e0a785f74 100644 --- a/docs/index.markdown +++ b/docs/index.markdown @@ -42,7 +42,7 @@ geemus says: "That should give you everything you need to get started, but let m ## Contributing -* Find something you would like to work on. For suggestions look for the `easy`, `medium` and `hard` tags in the [issues](http://github.com/geemus/fog/issues) +* Find something you would like to work on. For suggestions look for the `easy`, `medium` and `hard` tags in the [issues](http://github.com/fog/fog/issues) * Fork the project and do your work in a topic branch. * Add shindo tests to prove your code works and run all the tests using `bundle exec rake`. * Rebase your branch against geemus/fog to make sure everything is up to date. @@ -53,8 +53,8 @@ geemus says: "That should give you everything you need to get started, but let m Wonder how you can get a lovely fog shirt? Look no further! * Blue shirts go to people who have contributed indirectly, great examples are writing blog posts or giving lightning talks. -* Grey shirts and a follow from @fog go to people who have made it on to the [contributors list](https://github.com/geemus/fog/contributors) by submitting code. -* Black shirts go to people who have made it on to the [collaborators list](https://github.com/api/v2/json/repos/show/geemus/fog/collaborators) by coercing geemus into adding them (geemus is currently the only member of this list). +* Grey shirts and a follow from @fog go to people who have made it on to the [contributors list](https://github.com/fog/fog/contributors) by submitting code. +* Black shirts go to people who have made it on to the [collaborators list](https://github.com/api/v2/json/repos/show/geemus/fog/collaborators) by coercing geemus into adding them. ## Resources @@ -65,7 +65,7 @@ Enjoy, and let me know what I can do to continue improving fog! 
* Stay up to date by following [@fog](http://twitter.com/fog) and/or [@geemus](http://twitter.com/geemus) on Twitter. * Get and give help on the [#ruby-fog](irc://irc.freenode.net/ruby-fog) irc channel on Freenode * Follow release notes and discussions on the [mailing list](http://groups.google.com/group/ruby-fog) -* Report bugs or find tasks to help with in the [issues](http://github.com/geemus/fog/issues) +* Report bugs or find tasks to help with in the [issues](http://github.com/fog/fog/issues) * Learn about [contributing](/about/contributing.html) * See where fog is used and let the world know how you use it [in the wild](/about/users.html) * Check out blog posts and other mentions in the [press](/about/press.html) diff --git a/docs/public/css/fog.css b/docs/public/css/fog.css index 3aed11143e..de993a7f13 100644 --- a/docs/public/css/fog.css +++ b/docs/public/css/fog.css @@ -56,7 +56,7 @@ header dl dt { font-weight: bold; } -#main { +nav, #main { background-color: #FFF; -moz-border-radius: 0.5em; border-radius: 0.5em; @@ -66,6 +66,19 @@ header dl dt { margin-bottom: 2em; } +nav { + padding: 1em; +} + +nav li { + display: inline; + padding-left: 2em; +} + +nav li a { + font-size: 1.5em; +} + footer { background-color: #A0C0E1; border-color: #70A1D2; diff --git a/docs/storage/index.markdown b/docs/storage/index.markdown index 1de5cec227..b81bce78cc 100644 --- a/docs/storage/index.markdown +++ b/docs/storage/index.markdown @@ -5,13 +5,13 @@ title: Storage Having Ruby experience makes you hirable; but how can you stand out? You need to demonstrate your abilities. What better way than using Ruby and "the cloud" to store and serve your resume! -In this blog post you will learn to use fog - the cloud computing library - to upload your resume to Amazon's Simple Storage Service (S3), Rackspace's CloudFiles or Google's Storage for Developers. +In this blog post you will learn to use fog - the cloud computing library - to upload your resume to Amazon's Simple Storage Service (S3), Rackspace's CloudFiles or Google's Storage for Developers. Here's my out of date resume stored on S3, CloudFiles and Google Storage; programmatically stored in the cloud using this tutorial. NOTE: my boss would like me to add that I'm not currently looking for a new gig ;) Check out those cloud-specific URLs! You could put all three in your job application, add the Ruby source for how you did it, and have your choice of Ruby jobs for being so awesome! -How? The all-clouds-in-one library of choice is fog. +How? The all-clouds-in-one library of choice is fog. ## Installing fog diff --git a/fog.gemspec b/fog.gemspec index d7ade918dc..5b115d3992 100644 --- a/fog.gemspec +++ b/fog.gemspec @@ -6,21 +6,21 @@ Gem::Specification.new do |s| ## If your rubyforge_project name is different, then edit it and comment out ## the sub! line in the Rakefile s.name = 'fog' - s.version = '0.11.0' - s.date = '2011-08-18' + s.version = '1.0.0' + s.date = '2011-09-29' s.rubyforge_project = 'fog' ## Make sure your summary is short. The description may be as long ## as you like. s.summary = "brings clouds to you" - s.description = "The Ruby cloud services library." + s.description = "The Ruby cloud services library. Supports all major cloud providers including AWS, Rackspace, Linode, Blue Box, StormOnDemand, and many others. Full support for most AWS services including EC2, S3, CloudWatch, SimpleDB, ELB, and RDS." ## List the primary authors. If there are a bunch of authors, it's probably ## better to set the email to an email list or something. 
If you don't have ## a custom homepage, consider using your GitHub URL or the like. s.authors = ["geemus (Wesley Beary)"] s.email = 'geemus@gmail.com' - s.homepage = 'http://github.com/geemus/fog' + s.homepage = 'http://github.com/fog/fog' ## This sections is only necessary if you have C extensions. # s.require_paths << 'ext' @@ -37,7 +37,7 @@ Gem::Specification.new do |s| ## List your runtime dependencies here. Runtime dependencies are those ## that are needed for an end user to actually USE your code. s.add_dependency('builder') - s.add_dependency('excon', '~>0.6.5') + s.add_dependency('excon', '~>0.7.4') s.add_dependency('formatador', '~>0.2.0') s.add_dependency('multi_json', '~>1.0.3') s.add_dependency('mime-types') diff --git a/lib/fog.rb b/lib/fog.rb index ae2a02ed11..a6a5958203 100644 --- a/lib/fog.rb +++ b/lib/fog.rb @@ -3,7 +3,7 @@ module Fog unless const_defined?(:VERSION) - VERSION = '0.11.0' + VERSION = '1.0.0' end end diff --git a/lib/fog/aws.rb b/lib/fog/aws.rb index 43c3ec3864..48b78de340 100644 --- a/lib/fog/aws.rb +++ b/lib/fog/aws.rb @@ -5,21 +5,22 @@ module AWS extend Fog::Provider - service(:auto_scaling, 'aws/auto_scaling') - service(:cdn, 'aws/cdn') - service(:compute, 'aws/compute') - service(:cloud_formation, 'aws/cloud_formation') - service(:cloud_watch, 'aws/cloud_watch') - service(:dns, 'aws/dns') - service(:elb, 'aws/elb') - service(:emr, 'aws/emr') - service(:iam, 'aws/iam') - service(:rds, 'aws/rds') - service(:ses, 'aws/ses') - service(:simpledb, 'aws/simpledb') - service(:sns, 'aws/sns') - service(:sqs, 'aws/sqs') - service(:storage, 'aws/storage') + service(:auto_scaling, 'aws/auto_scaling', 'AutoScaling') + service(:cdn, 'aws/cdn', 'CDN') + service(:compute, 'aws/compute', 'Compute') + service(:cloud_formation, 'aws/cloud_formation', 'CloudFormation') + service(:cloud_watch, 'aws/cloud_watch', 'CloudWatch') + service(:dns, 'aws/dns', 'DNS') + service(:elasticache, 'aws/elasticache', 'Elasticache') + service(:elb, 'aws/elb', 'ELB') + service(:emr, 'aws/emr', 'EMR') + service(:iam, 'aws/iam', 'IAM') + service(:rds, 'aws/rds', 'RDS') + service(:ses, 'aws/ses', 'SES') + service(:simpledb, 'aws/simpledb', 'SimpleDB') + service(:sns, 'aws/sns', 'SNS') + service(:sqs, 'aws/sqs', 'SQS') + service(:storage, 'aws/storage', 'Storage') def self.indexed_param(key, values) params = {} @@ -202,6 +203,8 @@ def self.request_id class << self alias :reserved_instances_id :request_id alias :reserved_instances_offering_id :request_id + alias :sqs_message_id :request_id + alias :sqs_sender_id :request_id end def self.reservation_id diff --git a/lib/fog/aws/cloud_formation.rb b/lib/fog/aws/cloud_formation.rb index 061fe06029..78ecf4387e 100644 --- a/lib/fog/aws/cloud_formation.rb +++ b/lib/fog/aws/cloud_formation.rb @@ -9,6 +9,7 @@ class CloudFormation < Fog::Service request_path 'fog/aws/requests/cloud_formation' request :create_stack + request :update_stack request :delete_stack request :describe_stack_events request :describe_stack_resources diff --git a/lib/fog/aws/cloud_watch.rb b/lib/fog/aws/cloud_watch.rb index eb90e13efa..a1dd9c12f4 100644 --- a/lib/fog/aws/cloud_watch.rb +++ b/lib/fog/aws/cloud_watch.rb @@ -13,12 +13,26 @@ class CloudWatch < Fog::Service request :list_metrics request :get_metric_statistics request :put_metric_data + request :describe_alarms + request :put_metric_alarm + request :delete_alarms + request :describe_alarm_history + request :enable_alarm_actions + request :disable_alarm_actions + request :describe_alarms_for_metric + request :set_alarm_state 
model_path 'fog/aws/models/cloud_watch' model :metric collection :metrics model :metric_statistic collection :metric_statistics + model :alarm_datum + collection :alarm_data + model :alarm_history + collection :alarm_histories + model :alarm + collection :alarms class Mock diff --git a/lib/fog/aws/compute.rb b/lib/fog/aws/compute.rb index 444d4aa897..992f166220 100644 --- a/lib/fog/aws/compute.rb +++ b/lib/fog/aws/compute.rb @@ -93,7 +93,7 @@ class AWS < Fog::Service class Real def modify_image_attributes(*params) - Fog::Logger.warning("modify_image_attributes is deprecated, use modify_image_attribute instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("modify_image_attributes is deprecated, use modify_image_attribute instead [light_black](#{caller.first})[/]") modify_image_attribute(*params) end @@ -103,12 +103,17 @@ class Mock def self.data @data ||= Hash.new do |hash, region| - owner_id = Fog::AWS::Mock.owner_id hash[region] = Hash.new do |region_hash, key| + owner_id = Fog::AWS::Mock.owner_id region_hash[key] = { :deleted_at => {}, :addresses => {}, :images => {}, + :image_launch_permissions => Hash.new do |permissions_hash, image_key| + permissions_hash[image_key] = { + :users => [] + } + end, :instances => {}, :reserved_instances => {}, :key_pairs => {}, @@ -116,9 +121,10 @@ def self.data :owner_id => owner_id, :security_groups => { 'default' => { - 'groupDescription' => 'default group', - 'groupName' => 'default', - 'ipPermissions' => [ + 'groupDescription' => 'default group', + 'groupName' => 'default', + 'ipPermissionsEgress' => [], + 'ipPermissions' => [ { 'groups' => [{'groupName' => 'default', 'userId' => owner_id}], 'fromPort' => -1, @@ -141,12 +147,15 @@ def self.data 'ipRanges' => [] } ], - 'ownerId' => owner_id + 'ownerId' => owner_id } }, :snapshots => {}, :volumes => {}, - :tags => {} + :tags => {}, + :tag_sets => Hash.new do |tag_set_hash, resource_id| + tag_set_hash[resource_id] = {} + end } end end @@ -166,36 +175,58 @@ def initialize(options={}) end end + def region_data + self.class.data[@region] + end + def data - self.class.data[@region][@aws_access_key_id] + self.region_data[@aws_access_key_id] end def reset_data - self.class.data[@region].delete(@aws_access_key_id) + self.region_data.delete(@aws_access_key_id) + end + + def visible_images + images = self.data[:images].values.inject({}) do |h, image| + h.update(image['imageId'] => image) + end + + self.region_data.each do |aws_access_key_id, data| + data[:image_launch_permissions].each do |image_id, list| + if list[:users].include?(self.data[:owner_id]) + images.update(image_id => data[:images][image_id]) + end + end + end + + images end - def apply_tag_filters(resources, filters) + def apply_tag_filters(resources, filters, resource_id_key) + tag_set_fetcher = lambda {|resource| self.data[:tag_sets][resource[resource_id_key]] } + # tag-key: match resources tagged with this key (any value) if filters.has_key?('tag-key') value = filters.delete('tag-key') - resources = resources.select{|r| r['tagSet'].has_key?(value)} + resources = resources.select{|r| tag_set_fetcher[r].has_key?(value)} end - + # tag-value: match resources tagged with this value (any key) if filters.has_key?('tag-value') value = filters.delete('tag-value') - resources = resources.select{|r| r['tagSet'].values.include?(value)} + resources = resources.select{|r| tag_set_fetcher[r].values.include?(value)} end - - # tag:key: match resources taged with a key-value pair. Value may be an array, which is OR'd. 
+ + # tag:key: match resources tagged with a key-value pair. Value may be an array, which is OR'd. tag_filters = {} - filters.keys.each do |key| + filters.keys.each do |key| tag_filters[key.gsub('tag:', '')] = filters.delete(key) if /^tag:/ =~ key end for tag_key, tag_value in tag_filters - resources = resources.select{|r| tag_value.include?(r['tagSet'][tag_key])} + resources = resources.select{|r| tag_value.include?(tag_set_fetcher[r][tag_key])} end - + resources end end @@ -205,7 +236,7 @@ class Real # Initialize connection to EC2 # # ==== Notes - # options parameter must include values for :aws_access_key_id and + # options parameter must include values for :aws_access_key_id and # :aws_secret_access_key in order to create a connection # # ==== Examples @@ -264,7 +295,7 @@ def reload end private - + def request(params) idempotent = params.delete(:idempotent) parser = params.delete(:parser) diff --git a/lib/fog/aws/elasticache.rb b/lib/fog/aws/elasticache.rb new file mode 100644 index 0000000000..5a21a67e87 --- /dev/null +++ b/lib/fog/aws/elasticache.rb @@ -0,0 +1,127 @@ +module Fog + module AWS + class Elasticache < Fog::Service + + class IdentifierTaken < Fog::Errors::Error; end + class InvalidInstance < Fog::Errors::Error; end + + requires :aws_access_key_id, :aws_secret_access_key + recognizes :region, :host, :path, :port, :scheme, :persistent + + request_path 'fog/aws/requests/elasticache' + + request :create_cache_cluster + request :delete_cache_cluster + request :describe_cache_clusters + request :modify_cache_cluster + request :reboot_cache_cluster + + request :create_cache_parameter_group + request :delete_cache_parameter_group + request :describe_cache_parameter_groups + request :modify_cache_parameter_group + request :reset_cache_parameter_group + request :describe_engine_default_parameters + request :describe_cache_parameters + + request :create_cache_security_group + request :delete_cache_security_group + request :describe_cache_security_groups + request :authorize_cache_security_group_ingress + request :revoke_cache_security_group_ingress + + request :describe_events + + model_path 'fog/aws/models/elasticache' + model :cluster + collection :clusters + model :security_group + collection :security_groups + model :parameter_group + collection :parameter_groups + + class Mock + def initalize(options={}) + Fog::Mock.not_implemented + end + end + + class Real + + def initialize(options={}) + @aws_access_key_id = options[:aws_access_key_id] + @aws_secret_access_key = options[:aws_secret_access_key] + @hmac = Fog::HMAC.new('sha256', @aws_secret_access_key) + + options[:region] ||= 'us-east-1' + @host = options[:host] || case options[:region] + when 'us-east-1' + 'elasticache.us-east-1.amazonaws.com' + #TODO: Support other regions + else + raise ArgumentError, "Unknown region: #{options[:region].inspect}" + end + @path = options[:path] || '/' + @port = options[:port] || 443 + @scheme = options[:scheme] || 'https' + @connection = Fog::Connection.new( + "#{@scheme}://#{@host}:#{@port}#{@path}", options[:persistent] + ) + end + + def reload + @connection.reset + end + + private + def request(params) + idempotent = params.delete(:idempotent) + parser = params.delete(:parser) + + body = Fog::AWS.signed_params( + params, + { + :aws_access_key_id => @aws_access_key_id, + :hmac => @hmac, + :host => @host, + :path => @path, + :port => @port, + :version => '2011-07-15' + } + ) + + begin + response = @connection.request({ + :body => body, + :expects => 200, + :headers => { 'Content-Type' => 
'application/x-www-form-urlencoded' }, + :idempotent => idempotent, + :host => @host, + :method => 'POST', + :parser => parser + }) + rescue Excon::Errors::HTTPStatusError => error + if match = error.message.match(/(.*)<\/Code>/m) + case match[1] + when 'CacheSecurityGroupNotFound', 'CacheParameterGroupNotFound', + 'CacheClusterNotFound' + raise Fog::AWS::Elasticache::NotFound + when 'CacheSecurityGroupAlreadyExists' + raise Fog::AWS::Elasticache::IdentifierTaken + when 'InvalidParameterValue' + raise Fog::AWS::Elasticache::InvalidInstance + else + raise + end + else + raise + end + end + + response + end + + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarm.rb b/lib/fog/aws/models/cloud_watch/alarm.rb new file mode 100644 index 0000000000..759537423c --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarm.rb @@ -0,0 +1,12 @@ +require 'fog/core/model' + +module Fog + module AWS + class CloudWatch + + class Alarm < Fog::Model + attribute :alarm_names, :aliases => 'AlarmNames' + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarm_data.rb b/lib/fog/aws/models/cloud_watch/alarm_data.rb new file mode 100644 index 0000000000..269452093c --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarm_data.rb @@ -0,0 +1,39 @@ +require 'fog/core/collection' +require 'fog/aws/models/cloud_watch/alarm_datum' + +module Fog + module AWS + class CloudWatch + class AlarmData < Fog::Collection + model Fog::AWS::CloudWatch::AlarmDatum + + def all(conditions={}) + data = connection.describe_alarms(conditions).body['DescribeAlarmsResult']['MetricAlarms'] + load(data) # data is an array of attribute hashes + end + + def get(namespace, metric_name, dimensions=nil, period=nil, statistic=nil, unit=nil) + list_opts = {'Namespace' => namespace, 'MetricName' => metric_name} + if dimensions + dimensions_array = dimensions.collect do |name, value| + {'Name' => name, 'Value' => value} + end + list_opts.merge!('Dimensions' => dimensions_array) + end + if period + list_opts.merge!('Period' => period) + end + if statistic + list_opts.merge!('Statistic' => statistic) + end + if unit + list_opts.merge!('Unit' => unit) + end + data = connection.describe_alarms_for_metric(list_opts).body['DescribeAlarmsForMetricResult']['MetricAlarms'] + load(data) + end + + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarm_datum.rb b/lib/fog/aws/models/cloud_watch/alarm_datum.rb new file mode 100644 index 0000000000..ea773e0103 --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarm_datum.rb @@ -0,0 +1,66 @@ +require 'fog/core/model' + +module Fog + module AWS + class CloudWatch + + class AlarmDatum < Fog::Model + attribute :alarm_name, :aliases => 'AlarmName' + attribute :metric_name, :aliases => 'MetricName' + attribute :namespace, :aliases => 'Namespace' + attribute :dimensions, :aliases => 'Dimensions' + attribute :alarm_description, :aliases => 'AlarmDescription' + attribute :alarm_arn, :aliases => 'AlarmArn' + attribute :state_value, :aliases => 'StateValue' + attribute :statistic, :aliases => 'Statistic' + attribute :comparison_operator, :aliases => 'ComparisonOperator' + attribute :state_reason, :aliases => 'StateReason' + attribute :action_enabled, :aliases => 'ActionsEnabled' + attribute :period, :aliases => 'Period' + attribute :evaluation_periods, :aliases => 'EvaluationPeriods' + attribute :threshold, :aliases => 'Threshold' + attribute :alarm_actions, :aliases => 'AlarmActions' + attribute :ok_actions, :aliases => 'OKActions' + attribute :insufficient_actions, :aliases => 
'InsufficientDataActions' + attribute :unit, :aliases => 'Unit' + attribute :state_updated_timestamp, :aliases => 'StateUpdatedTimestamp' + attribute :alarm_configuration_updated_timestamp, :aliases => 'AlarmConfigurationUpdatedTimestamp' + + def save + requires :alarm_name + requires :comparison_operator + requires :evaluation_periods + requires :metric_name + requires :namespace + requires :period + requires :statistic + requires :threshold + + alarm_definition = { + 'AlarmName' => alarm_name, + 'ComparisonOperator' => comparison_operator, + 'EvaluationPeriods' => evaluation_periods, + 'MetricName' => metric_name, + 'Namespace' => namespace, + 'Period' => period, + 'Statistic' => statistic, + 'Threshold' => threshold + } + + alarm_definition.merge!('ActionsEnabled' => action_enabled) if action_enabled + alarm_definition.merge!('AlarmActions' => alarm_actions) if alarm_actions + alarm_definition.merge!('AlarmDescription' => alarm_description) if alarm_description + + #dimension is an array of Name/Value pairs, ex. [{'Name'=>'host', 'Value'=>'localhost'},{'Name'=>'version', 'Value'=>'0.11.0'}] + alarm_definition.merge!('Dimensions' => dimensions) if dimensions + alarm_definition.merge!('InsufficientDataActions' => insufficient_actions) if insufficient_actions + alarm_definition.merge!('OKActions' => ok_actions) if ok_actions + alarm_definition.merge!('Unit' => unit) if unit + + connection.put_metric_alarm(alarm_definition) + true + end + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarm_histories.rb b/lib/fog/aws/models/cloud_watch/alarm_histories.rb new file mode 100644 index 0000000000..7e4a76f958 --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarm_histories.rb @@ -0,0 +1,18 @@ +require 'fog/core/collection' +require 'fog/aws/models/cloud_watch/alarm_history' + +module Fog + module AWS + class CloudWatch + class AlarmHistories < Fog::Collection + model Fog::AWS::CloudWatch::AlarmHistory + + def all(conditions={}) + data = connection.describe_alarm_history(conditions).body['DescribeAlarmHistoryResult']['AlarmHistoryItems'] + load(data) # data is an array of attribute hashes + end + + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarm_history.rb b/lib/fog/aws/models/cloud_watch/alarm_history.rb new file mode 100644 index 0000000000..5fb4c6b4fc --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarm_history.rb @@ -0,0 +1,16 @@ +require 'fog/core/model' + +module Fog + module AWS + class CloudWatch + + class AlarmHistory < Fog::Model + attribute :alarm_name, :aliases => 'AlarmName' + attribute :end_date, :aliases => 'EndDate' + attribute :history_item_type, :aliases => 'HistoryItemType' + attribute :max_records, :aliases => 'MaxRecords' + attribute :start_date, :aliases => 'StartDate' + end + end + end +end diff --git a/lib/fog/aws/models/cloud_watch/alarms.rb b/lib/fog/aws/models/cloud_watch/alarms.rb new file mode 100644 index 0000000000..46d362286c --- /dev/null +++ b/lib/fog/aws/models/cloud_watch/alarms.rb @@ -0,0 +1,30 @@ +require 'fog/core/collection' +require 'fog/aws/models/cloud_watch/alarm' + +module Fog + module AWS + class CloudWatch + + class Alarms < Fog::Collection + model Fog::AWS::CloudWatch::Alarm + + #alarm_names is an array of alarm names + def delete(alarm_names) + connection.delete_alarms(alarm_names) + true + end + + def disable(alarm_names) + connection.disable_alarm_actions(alarm_names) + true + end + + def enable(alarm_names) + connection.enable_alarm_actions(alarm_names) + true + end + + end + end + end +end diff 
--git a/lib/fog/aws/models/compute/addresses.rb b/lib/fog/aws/models/compute/addresses.rb index d50b50fe5d..22efb46b1b 100644 --- a/lib/fog/aws/models/compute/addresses.rb +++ b/lib/fog/aws/models/compute/addresses.rb @@ -56,7 +56,7 @@ def initialize(attributes) def all(filters = filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('public-ip' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('public-ip' => []) instead [light_black](#{caller.first})[/]") filters = {'public-ip' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/compute/key_pairs.rb b/lib/fog/aws/models/compute/key_pairs.rb index e60350138e..fb8832924b 100644 --- a/lib/fog/aws/models/compute/key_pairs.rb +++ b/lib/fog/aws/models/compute/key_pairs.rb @@ -51,7 +51,7 @@ def initialize(attributes) def all(filters = filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('key-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('key-name' => []) instead [light_black](#{caller.first})[/]") filters = {'key-name' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/compute/security_groups.rb b/lib/fog/aws/models/compute/security_groups.rb index 32ae5b98af..50a0fe0ff1 100644 --- a/lib/fog/aws/models/compute/security_groups.rb +++ b/lib/fog/aws/models/compute/security_groups.rb @@ -57,7 +57,7 @@ def initialize(attributes) def all(filters = filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('group-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('group-name' => []) instead [light_black](#{caller.first})[/]") filters = {'group-name' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/compute/server.rb b/lib/fog/aws/models/compute/server.rb index b6548632f9..7d9d394537 100644 --- a/lib/fog/aws/models/compute/server.rb +++ b/lib/fog/aws/models/compute/server.rb @@ -169,8 +169,10 @@ def save data = connection.run_instances(image_id, 1, 1, options) merge_attributes(data.body['instancesSet'].first) - if self.tags - for key, value in self.tags + if tags = self.tags + # expect eventual consistency + Fog.wait_for { self.reload rescue nil } + for key, value in (self.tags = tags) connection.tags.create( :key => key, :resource_id => self.identity, diff --git a/lib/fog/aws/models/compute/servers.rb b/lib/fog/aws/models/compute/servers.rb index 0c9c30f465..bda8da67b5 100644 --- a/lib/fog/aws/models/compute/servers.rb +++ b/lib/fog/aws/models/compute/servers.rb @@ -49,7 +49,7 @@ class Servers < Fog::Collection # user_data=nil # > # - + def initialize(attributes) self.filters ||= {} super @@ -57,7 +57,7 @@ def initialize(attributes) def all(filters = self.filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('instance-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('instance-id' => []) instead [light_black](#{caller.first})[/]") filters = {'instance-id' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/compute/snapshots.rb b/lib/fog/aws/models/compute/snapshots.rb index 
04cd696ad7..5e751a9f14 100644 --- a/lib/fog/aws/models/compute/snapshots.rb +++ b/lib/fog/aws/models/compute/snapshots.rb @@ -19,7 +19,7 @@ def initialize(attributes) def all(filters = filters, options = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('snapshot-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('snapshot-id' => []) instead [light_black](#{caller.first})[/]") filters = {'snapshot-id' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/compute/spot_request.rb b/lib/fog/aws/models/compute/spot_request.rb index b622175fb4..46bf450512 100644 --- a/lib/fog/aws/models/compute/spot_request.rb +++ b/lib/fog/aws/models/compute/spot_request.rb @@ -55,6 +55,27 @@ def initialize(attributes={}) super end + def destroy + requires :id + + connection.cancel_spot_instance_requests(id) + true + end + + def key_pair + requires :key_name + + connection.key_pairs.all(key_name).first + end + + def key_pair=(new_keypair) + self.key_name = new_keypair && new_keypair.name + end + + def ready? + state == 'active' + end + def save requires :image_id, :flavor_id, :price @@ -83,10 +104,6 @@ def save merge_attributes( spot_instance_request ) end - def ready? - state == 'active' - end - end end end diff --git a/lib/fog/aws/models/compute/spot_requests.rb b/lib/fog/aws/models/compute/spot_requests.rb index 61128d22a8..bd155a0c65 100644 --- a/lib/fog/aws/models/compute/spot_requests.rb +++ b/lib/fog/aws/models/compute/spot_requests.rb @@ -17,7 +17,7 @@ def initialize(attributes) def all(filters = self.filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('spot-instance-request-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('spot-instance-request-id' => []) instead [light_black](#{caller.first})[/]") filters = {'spot-instance-request-id' => [*filters]} end self.filters = filters @@ -34,6 +34,50 @@ def all(filters = self.filters) ) end + def bootstrap(new_attributes = {}) + spot_request = connection.spot_requests.new(new_attributes) + + unless new_attributes[:key_name] + # first or create fog_#{credential} keypair + name = Fog.respond_to?(:credential) && Fog.credential || :default + unless spot_request.key_pair = connection.key_pairs.get("fog_#{name}") + spot_request.key_pair = connection.key_pairs.create( + :name => "fog_#{name}", + :public_key => server.public_key + ) + end + end + + # make sure port 22 is open in the first security group + security_group = connection.security_groups.get(spot_request.groups.first) + authorized = security_group.ip_permissions.detect do |ip_permission| + ip_permission['ipRanges'].first && ip_permission['ipRanges'].first['cidrIp'] == '0.0.0.0/0' && + ip_permission['fromPort'] == 22 && + ip_permission['ipProtocol'] == 'tcp' && + ip_permission['toPort'] == 22 + end + unless authorized + security_group.authorize_port_range(22..22) + end + + spot_request.save + spot_request.wait_for { ready? } + Fog.wait_for { server = connection.servers.get(spot_request.instance_id) } + server = connection.servers.get(spot_request.instance_id) + if spot_request.tags + for key, value in spot_request.tags + connection.tags.create( + :key => key, + :resource_id => spot_request.instance_id, + :value => value + ) + end + end + server.wait_for { ready? 
} + server.setup(:key_data => [server.private_key]) + server + end + def get(spot_request_id) if spot_request_id self.class.new(:connection => connection).all('spot-instance-request-id' => spot_request_id).first diff --git a/lib/fog/aws/models/compute/volume.rb b/lib/fog/aws/models/compute/volume.rb index 762d2b9d6c..efdb759bcf 100644 --- a/lib/fog/aws/models/compute/volume.rb +++ b/lib/fog/aws/models/compute/volume.rb @@ -68,6 +68,11 @@ def snapshots connection.snapshots(:volume => self) end + def snapshot(description) + requires :id + connection.create_snapshot(id, description) + end + def force_detach detach(true) end diff --git a/lib/fog/aws/models/compute/volumes.rb b/lib/fog/aws/models/compute/volumes.rb index cf7594d767..f84184f969 100644 --- a/lib/fog/aws/models/compute/volumes.rb +++ b/lib/fog/aws/models/compute/volumes.rb @@ -64,7 +64,7 @@ def initialize(attributes) def all(filters = filters) unless filters.is_a?(Hash) - Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('volume-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("all with #{filters.class} param is deprecated, use all('volume-id' => []) instead [light_black](#{caller.first})[/]") filters = {'volume-id' => [*filters]} end self.filters = filters diff --git a/lib/fog/aws/models/elasticache/cluster.rb b/lib/fog/aws/models/elasticache/cluster.rb new file mode 100644 index 0000000000..488393ca12 --- /dev/null +++ b/lib/fog/aws/models/elasticache/cluster.rb @@ -0,0 +1,69 @@ +require 'fog/core/model' + +module Fog + module AWS + class Elasticache + + class Cluster < Fog::Model + # simple attributes + identity :id, :aliases => 'CacheClusterId' + attribute :auto_upgrade, :aliases => 'AutoMinorVersionUpgrade' + attribute :status, :aliases => 'CacheClusterStatus' + attribute :node_type, :aliases => 'CacheNodeType' + attribute :engine, :aliases => 'Engine' + attribute :engine_version, :aliases => 'EngineVersion' + attribute :num_nodes, :aliases => 'NumCacheNodes' + attribute :zone, :aliases => 'PreferredAvailabilityZone' + attribute :port, :aliases => 'Port' + attribute :maintenance_window, :aliases => 'PreferredMaintenanceWindow' + # complex attributes + attribute :nodes, :aliases => 'CacheNodes', :type => :array + attribute :parameter_group, + :aliases => 'CacheParameterGroup', :type => :hash + attribute :pending_values, + :aliases => 'PendingModifiedValues', :type => :hash + attribute :create_time, + :aliases => 'CacheClusterCreateTime', :type => :date_time + attribute :security_groups, + :aliases => 'CacheSecurityGroups', :type => :array + attribute :notification_config, + :aliases => 'NotificationConfiguration', :type => :hash + + def ready? 
+ status == 'available' + end + + def destroy + requires :id + connection.delete_cache_cluster(id) + true + end + + def save + requires :id + + parameter_group ||= Hash.new + notification_config ||= Hash.new + + connection.create_cache_cluster( + id, { + :node_type => node_type, + :security_group_names => security_groups, + :num_nodes => num_nodes, + :auto_minor_version_upgrade => auto_upgrade, + :engine => engine, + :engine_version => engine_version, + :notification_topic_arn => notification_config['TopicArn'], + :port => port, + :preferred_availablility_zone => zone, + :preferred_maintenance_window => maintenance_window, + :parameter_group_name => parameter_group['CacheParameterGroupName'], + } + ) + end + + end + + end + end +end diff --git a/lib/fog/aws/models/elasticache/clusters.rb b/lib/fog/aws/models/elasticache/clusters.rb new file mode 100644 index 0000000000..e99b93b995 --- /dev/null +++ b/lib/fog/aws/models/elasticache/clusters.rb @@ -0,0 +1,31 @@ +require 'fog/core/collection' +require 'fog/aws/models/elasticache/cluster' + +module Fog + module AWS + class Elasticache + + class Clusters < Fog::Collection + model Fog::AWS::Elasticache::Cluster + + def all + load( + connection.describe_cache_clusters( + nil, :show_node_info => true + ).body['CacheClusters'] + ) + end + + def get(identity, show_node_info = true) + new( + connection.describe_cache_clusters( + identity, :show_node_info => show_node_info + ).body['CacheClusters'].first + ) + rescue Fog::AWS::Elasticache::NotFound + end + end + + end + end +end diff --git a/lib/fog/aws/models/elasticache/parameter_group.rb b/lib/fog/aws/models/elasticache/parameter_group.rb new file mode 100644 index 0000000000..67054d702d --- /dev/null +++ b/lib/fog/aws/models/elasticache/parameter_group.rb @@ -0,0 +1,32 @@ +require 'fog/core/model' + +module Fog + module AWS + class Elasticache + + class ParameterGroup < Fog::Model + + identity :id, :aliases => 'CacheParameterGroupName' + attribute :description, :aliases => 'Description' + attribute :family, :aliases => 'CacheParameterGroupFamily' + + def destroy + requires :id + connection.delete_cache_parameter_group(id) + true + end + + def save + requires :id + connection.create_cache_parameter_group( + id, + description = id, + family = 'memcached1.4' + ) + end + + end + + end + end +end diff --git a/lib/fog/aws/models/elasticache/parameter_groups.rb b/lib/fog/aws/models/elasticache/parameter_groups.rb new file mode 100644 index 0000000000..2eceef4596 --- /dev/null +++ b/lib/fog/aws/models/elasticache/parameter_groups.rb @@ -0,0 +1,30 @@ +require 'fog/core/collection' +require 'fog/aws/models/elasticache/parameter_group' + +module Fog + module AWS + class Elasticache + + class ParameterGroups < Fog::Collection + model Fog::AWS::Elasticache::ParameterGroup + + def all + load( + connection.describe_cache_parameter_groups.body['CacheParameterGroups'] + ) + end + + def get(identity) + new( + connection.describe_cache_parameter_groups( + identity + ).body['CacheParameterGroups'].first + ) + rescue Fog::AWS::Elasticache::NotFound + nil + end + end + + end + end +end diff --git a/lib/fog/aws/models/elasticache/security_group.rb b/lib/fog/aws/models/elasticache/security_group.rb new file mode 100644 index 0000000000..306f956249 --- /dev/null +++ b/lib/fog/aws/models/elasticache/security_group.rb @@ -0,0 +1,52 @@ +require 'fog/core/model' + +module Fog + module AWS + class Elasticache + + class SecurityGroup < Fog::Model + + identity :id, :aliases => 'CacheSecurityGroupName' + attribute :description, 
:aliases => 'Description' + attribute :ec2_groups, :aliases => 'EC2SecurityGroups', :type => :array + attribute :owner_id, :aliases => 'OwnerId' + + def ready? + ec2_groups.all?{|ingress| ingress['Status'] == 'authorized'} + end + + def destroy + requires :id + connection.delete_cache_security_group(id) + true + end + + def save + requires :id + requires :description + connection.create_cache_security_group(id, description) + end + + def authorize_ec2_group(group_name, group_owner_id=owner_id) + requires :id + requires :owner_id if group_owner_id.nil? + data = connection.authorize_cache_security_group_ingress( + id, group_name, group_owner_id + ) + merge_attributes(data.body['CacheSecurityGroup']) + end + + def revoke_ec2_group(group_name, group_owner_id=owner_id) + requires :id + requires :owner_id if group_owner_id.nil? + data = connection.revoke_cache_security_group_ingress( + id, group_name, group_owner_id + ) + merge_attributes(data.body['CacheSecurityGroup']) + end + + end + + end + end +end diff --git a/lib/fog/aws/models/elasticache/security_groups.rb b/lib/fog/aws/models/elasticache/security_groups.rb new file mode 100644 index 0000000000..2578ff67bd --- /dev/null +++ b/lib/fog/aws/models/elasticache/security_groups.rb @@ -0,0 +1,30 @@ +require 'fog/core/collection' +require 'fog/aws/models/elasticache/security_group' + +module Fog + module AWS + class Elasticache + + class SecurityGroups < Fog::Collection + model Fog::AWS::Elasticache::SecurityGroup + + def all + load( + connection.describe_cache_security_groups.body['CacheSecurityGroups'] + ) + end + + def get(identity) + new( + connection.describe_cache_security_groups( + identity + ).body['CacheSecurityGroups'].first + ) + rescue Fog::AWS::Elasticache::NotFound + nil + end + end + + end + end +end diff --git a/lib/fog/aws/models/storage/file.rb b/lib/fog/aws/models/storage/file.rb index ce6ce83077..4ba3ff5cfe 100644 --- a/lib/fog/aws/models/storage/file.rb +++ b/lib/fog/aws/models/storage/file.rb @@ -104,7 +104,7 @@ def public_url def save(options = {}) requires :body, :directory, :key if options != {} - Fog::Logger.warning("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]") end options['x-amz-acl'] ||= @acl if @acl options['Cache-Control'] = cache_control if cache_control diff --git a/lib/fog/aws/parsers/cloud_formation/update_stack.rb b/lib/fog/aws/parsers/cloud_formation/update_stack.rb new file mode 100644 index 0000000000..3e3f528abe --- /dev/null +++ b/lib/fog/aws/parsers/cloud_formation/update_stack.rb @@ -0,0 +1,19 @@ +module Fog + module Parsers + module AWS + module CloudFormation + + class UpdateStack < Fog::Parsers::Base + + def end_element(name) + case name + when 'RequestId', 'StackId' + @response[name] = value + end + end + + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/delete_alarms.rb b/lib/fog/aws/parsers/cloud_watch/delete_alarms.rb new file mode 100644 index 0000000000..8a0999b968 --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/delete_alarms.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class DeleteAlarms < Fog::Parsers::Base + + def reset + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = @value + end + end + end + end + end + end +end diff --git 
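# A short sketch of the new Elasticache security group model, assuming the
# Elasticache service exposes a #security_groups collection (the collection
# class is defined above, but its wiring into the service is not part of this
# hunk); the EC2 group name and owner id below are illustrative:
#
#   elasticache = Fog::AWS::Elasticache.new(
#     :aws_access_key_id     => 'YOUR_KEY',
#     :aws_secret_access_key => 'YOUR_SECRET'
#   )
#
#   group = elasticache.security_groups.new(
#     :id          => 'cache-group',
#     :description => 'example cache security group'
#   )
#   group.save                                             # CreateCacheSecurityGroup
#   group.authorize_ec2_group('default', '123456789012')   # EC2 group name, EC2 owner id
#   group.wait_for { ready? }                              # all EC2 ingress rules authorized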
a/lib/fog/aws/parsers/cloud_watch/describe_alarm_history.rb b/lib/fog/aws/parsers/cloud_watch/describe_alarm_history.rb new file mode 100644 index 0000000000..35aa113bfd --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/describe_alarm_history.rb @@ -0,0 +1,40 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class DescribeAlarmHistory < Fog::Parsers::Base + + def reset + @response = { 'DescribeAlarmHistoryResult' => {'AlarmHistoryItems' => []}, 'ResponseMetadata' => {} } + reset_alarm_history_item + end + + def reset_alarm_history_item + @alarm_history_item = {} + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'AlarmName', 'HistoryItemType', 'HistorySummary' + @alarm_history_item[name] = value + when 'Timestamp' + @alarm_history_item[name] = Time.parse value + when 'RequestId' + @response['ResponseMetadata'][name] = value + when 'NextToken' + @response['ResponseMetadata'][name] = value + when 'member' + @response['DescribeAlarmHistoryResult']['AlarmHistoryItems'] << @alarm_history_item + reset_alarm_history_item + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/describe_alarms.rb b/lib/fog/aws/parsers/cloud_watch/describe_alarms.rb new file mode 100644 index 0000000000..99ad477a6e --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/describe_alarms.rb @@ -0,0 +1,73 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class DescribeAlarms < Fog::Parsers::Base + + def reset + @response = { 'DescribeAlarmsResult' => {'MetricAlarms' => []}, 'ResponseMetadata' => {} } + reset_metric_alarms + end + + def reset_metric_alarms + @metric_alarms = {'Dimensions' => []} + end + + def reset_dimension + @dimension = {} + end + + def start_element(name, attrs = []) + super + case name + when 'Dimensions' + @in_dimensions = true + when 'member' + if @in_dimensions + reset_dimension + end + end + end + + def end_element(name) + case name + when 'Name', 'Value' + @dimension[name] = value + when 'AlarmConfigurationUpdatedTimestamp', 'StateUpdatedTimestamp' + @metric_alarms[name] = Time.parse value + when 'Period', 'EvaluationPeriods' + @metric_alarms[name] = value.to_i + when 'Threshold' + @metric_alarms[name] = value.to_f + when 'AlarmActions', 'OKActions', 'InsufficientDataActions' + @metric_alarms[name] = value.to_s.strip + when 'AlarmName', 'Namespace', 'MetricName', 'AlarmDescription', 'AlarmArn', 'Unit', + 'StateValue', 'Statistic', 'ComparisonOperator', 'StateReason', 'ActionsEnabled' + @metric_alarms[name] = value + when 'StateUpdatedTimestamp', 'AlarmConfigurationUpdatedTimestamp' + @metric_alarms[name] = Time.parse value + when 'Dimensions' + @in_dimensions = false + when 'RequestId' + @response['ResponseMetadata'][name] = value + when 'NextToken' + @response['ResponseMetadata'][name] = value + when 'member' + if !@in_dimensions + if @metric_alarms.has_key?('AlarmName') + @response['DescribeAlarmsResult']['MetricAlarms'] << @metric_alarms + reset_metric_alarms + elsif @response['DescribeAlarmsResult']['MetricAlarms'].last != nil + @response['DescribeAlarmsResult']['MetricAlarms'].last.merge!( @metric_alarms) + end + else + @metric_alarms['Dimensions'] << @dimension + end + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/describe_alarms_for_metric.rb b/lib/fog/aws/parsers/cloud_watch/describe_alarms_for_metric.rb new file mode 100644 index 0000000000..a8358c4da7 --- /dev/null +++ 
b/lib/fog/aws/parsers/cloud_watch/describe_alarms_for_metric.rb @@ -0,0 +1,71 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class DescribeAlarmsForMetric < Fog::Parsers::Base + + def reset + @response = { 'DescribeAlarmsForMetricResult' => {'MetricAlarms' => []}, 'ResponseMetadata' => {} } + reset_metric_alarms + end + + def reset_metric_alarms + @metric_alarms = {'Dimensions' => []} + end + + def reset_dimension + @dimension = {} + end + + def start_element(name, attrs = []) + super + case name + when 'Dimensions' + @in_dimensions = true + when 'member' + if @in_dimensions + reset_dimension + end + end + end + + def end_element(name) + case name + when 'Name', 'Value' + @dimension[name] = value + when 'Period', 'EvaluationPeriods' + @metric_alarms[name] = value.to_i + when 'Threshold' + @metric_alarms[name] = value.to_f + when 'AlarmActions', 'OKActions', 'InsufficientDataActions' + @metric_alarms[name] = value.to_s.strip + when 'AlarmName', 'Namespace', 'MetricName', 'AlarmDescription', 'AlarmArn', 'Unit', + 'StateValue', 'Statistic', 'ComparisonOperator', 'StateReason', 'ActionsEnabled' + @metric_alarms[name] = value + when 'StateUpdatedTimestamp', 'AlarmConfigurationUpdatedTimestamp' + @metric_alarms[name] = Time.parse value + when 'Dimensions' + @in_dimensions = false + when 'NextToken' + @response['ResponseMetadata'][name] = value + when 'RequestId' + @response['ResponseMetadata'][name] = value + when 'member' + if !@in_dimensions + if @metric_alarms.has_key?('AlarmName') + @response['DescribeAlarmsForMetricResult']['MetricAlarms'] << @metric_alarms + reset_metric_alarms + elsif @response['DescribeAlarmsForMetricResult']['MetricAlarms'].last != nil + @response['DescribeAlarmsForMetricResult']['MetricAlarms'].last.merge!( @metric_alarms) + end + else + @metric_alarms['Dimensions'] << @dimension + end + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/disable_alarm_actions.rb b/lib/fog/aws/parsers/cloud_watch/disable_alarm_actions.rb new file mode 100644 index 0000000000..7700e06984 --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/disable_alarm_actions.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class DisableAlarmActions < Fog::Parsers::Base + + def reset + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = value + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/enable_alarm_actions.rb b/lib/fog/aws/parsers/cloud_watch/enable_alarm_actions.rb new file mode 100644 index 0000000000..f14dc362e2 --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/enable_alarm_actions.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class EnableAlarmActions < Fog::Parsers::Base + + def reset + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = value + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/get_metric_statistics.rb b/lib/fog/aws/parsers/cloud_watch/get_metric_statistics.rb index 9f802f2d6f..167303d7f3 100644 --- a/lib/fog/aws/parsers/cloud_watch/get_metric_statistics.rb +++ b/lib/fog/aws/parsers/cloud_watch/get_metric_statistics.rb @@ -40,3 +40,4 @@ def end_element(name) end end end + diff --git 
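# For orientation, the DescribeAlarms parser above produces a body shaped
# roughly like this (keys taken from the parser; values are illustrative):
#
#   {
#     'DescribeAlarmsResult' => {
#       'MetricAlarms' => [
#         {
#           'AlarmName'         => 'cpu-high',
#           'Namespace'         => 'AWS/EC2',
#           'MetricName'        => 'CPUUtilization',
#           'Statistic'         => 'Average',
#           'Period'            => 60,
#           'EvaluationPeriods' => 1,
#           'Threshold'         => 90.0,
#           'Dimensions'        => [{'Name' => 'InstanceId', 'Value' => 'i-00000000'}]
#         }
#       ]
#     },
#     'ResponseMetadata' => {'RequestId' => '00000000-0000-0000-0000-000000000000'}
#   }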
a/lib/fog/aws/parsers/cloud_watch/put_metric_alarm.rb b/lib/fog/aws/parsers/cloud_watch/put_metric_alarm.rb new file mode 100644 index 0000000000..f5c3d2cb41 --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/put_metric_alarm.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class PutMetricAlarm < Fog::Parsers::Base + + def reset + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = value.strip + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/cloud_watch/set_alarm_state.rb b/lib/fog/aws/parsers/cloud_watch/set_alarm_state.rb new file mode 100644 index 0000000000..3b3564395b --- /dev/null +++ b/lib/fog/aws/parsers/cloud_watch/set_alarm_state.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module CloudWatch + + class SetAlarmState < Fog::Parsers::Base + + def reset + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = value + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/compute/spot_instance_requests.rb b/lib/fog/aws/parsers/compute/spot_instance_requests.rb index a55594069a..e6b0ecf228 100644 --- a/lib/fog/aws/parsers/compute/spot_instance_requests.rb +++ b/lib/fog/aws/parsers/compute/spot_instance_requests.rb @@ -6,7 +6,7 @@ module AWS class SpotInstanceRequests < Fog::Parsers::Base def reset - @block_device_mapping = [] + @block_device_mapping = {} @context = [] @contexts = ['blockDeviceMapping', 'groupSet'] @spot_instance_request = { 'launchSpecification' => { 'blockDeviceMapping' => [], 'groupSet' => [] } } @@ -42,7 +42,7 @@ def end_element(name) when 'item' case @context.last when 'blockDeviceMapping' - @instance['blockDeviceMapping'] << @block_device_mapping + @spot_instance_request['launchSpecification']['blockDeviceMapping'] << @block_device_mapping @block_device_mapping = {} when nil @response['spotInstanceRequestSet'] << @spot_instance_request diff --git a/lib/fog/aws/parsers/aws/change_resource_record_sets.rb b/lib/fog/aws/parsers/dns/change_resource_record_sets.rb similarity index 100% rename from lib/fog/aws/parsers/aws/change_resource_record_sets.rb rename to lib/fog/aws/parsers/dns/change_resource_record_sets.rb diff --git a/lib/fog/aws/parsers/aws/create_hosted_zone.rb b/lib/fog/aws/parsers/dns/create_hosted_zone.rb similarity index 100% rename from lib/fog/aws/parsers/aws/create_hosted_zone.rb rename to lib/fog/aws/parsers/dns/create_hosted_zone.rb diff --git a/lib/fog/aws/parsers/aws/delete_hosted_zone.rb b/lib/fog/aws/parsers/dns/delete_hosted_zone.rb similarity index 100% rename from lib/fog/aws/parsers/aws/delete_hosted_zone.rb rename to lib/fog/aws/parsers/dns/delete_hosted_zone.rb diff --git a/lib/fog/aws/parsers/aws/get_change.rb b/lib/fog/aws/parsers/dns/get_change.rb similarity index 100% rename from lib/fog/aws/parsers/aws/get_change.rb rename to lib/fog/aws/parsers/dns/get_change.rb diff --git a/lib/fog/aws/parsers/aws/get_hosted_zone.rb b/lib/fog/aws/parsers/dns/get_hosted_zone.rb similarity index 100% rename from lib/fog/aws/parsers/aws/get_hosted_zone.rb rename to lib/fog/aws/parsers/dns/get_hosted_zone.rb diff --git a/lib/fog/aws/parsers/aws/list_hosted_zones.rb b/lib/fog/aws/parsers/dns/list_hosted_zones.rb similarity index 100% rename from 
lib/fog/aws/parsers/aws/list_hosted_zones.rb rename to lib/fog/aws/parsers/dns/list_hosted_zones.rb diff --git a/lib/fog/aws/parsers/aws/list_resource_record_sets.rb b/lib/fog/aws/parsers/dns/list_resource_record_sets.rb similarity index 100% rename from lib/fog/aws/parsers/aws/list_resource_record_sets.rb rename to lib/fog/aws/parsers/dns/list_resource_record_sets.rb diff --git a/lib/fog/aws/parsers/elasticache/authorize_cache_security_group_ingress.rb b/lib/fog/aws/parsers/elasticache/authorize_cache_security_group_ingress.rb new file mode 100644 index 0000000000..c3aaf99d86 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/authorize_cache_security_group_ingress.rb @@ -0,0 +1,26 @@ +module Fog + module Parsers + module AWS + module Elasticache + + require 'fog/aws/parsers/elasticache/security_group_parser' + + class AuthorizeCacheSecurityGroupIngress < Fog::Parsers::AWS::Elasticache::SecurityGroupParser + + def end_element(name) + case name + when 'CacheSecurityGroup' then + @response['CacheSecurityGroup'] = @security_group + reset_security_group + else + super + end + + end + + end + + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/base.rb b/lib/fog/aws/parsers/elasticache/base.rb new file mode 100644 index 0000000000..6e5238c012 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/base.rb @@ -0,0 +1,34 @@ +module Fog + module Parsers + module AWS + module Elasticache + + require 'fog/aws/parsers/elasticache/base' + + # Base parser for ResponseMetadata, RequestId + class Base < Fog::Parsers::Base + + def reset + super + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'RequestId' + @response['ResponseMetadata'][name] = value + else + super + end + end + + end + + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/cache_cluster_parser.rb b/lib/fog/aws/parsers/elasticache/cache_cluster_parser.rb new file mode 100644 index 0000000000..5c6b86cab1 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/cache_cluster_parser.rb @@ -0,0 +1,79 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/base' + + class CacheClusterParser < Base + + def reset + super + reset_cache_cluster + end + + def reset_cache_cluster + @cache_cluster = { + 'CacheSecurityGroups' => [], + 'CacheNodes' => [], + 'CacheParameterGroup' => {} + } + end + + def start_element(name, attrs = []) + super + case name + when 'CacheSecurityGroup'; then @security_group = {} + when 'CacheNode'; then @cache_node = {} + when 'PendingModifiedValues'; then @pending_values = {} + end + + end + + def end_element(name) + case name + when 'AutoMinorVersionUpgrade', 'CacheClusterId', + 'CacheClusterStatus', 'CacheNodeType', 'Engine', + 'PreferredAvailabilityZone', 'PreferredMaintenanceWindow' + @cache_cluster[name] = value + when 'EngineVersion', 'CacheNodeIdsToRemoves' + if @pending_values + @pending_values[name] = value ? value.strip : name + else + @cache_cluster[name] = value + end + when 'NumCacheNodes' + if @pending_values + @pending_values[name] = value.to_i + else + @cache_cluster[name] = value.to_i + end + when 'CacheClusterCreateTime' + @cache_cluster[name] = DateTime.parse(value) + when 'CacheSecurityGroup' + @cache_cluster["#{name}s"] << @security_group unless @security_group.empty? + when 'CacheSecurityGroupName', 'Status' + @security_group[name] = value + when 'CacheNode' + @cache_cluster["#{name}s"] << @cache_node unless @cache_node.empty? 
+ @cache_node = nil + when'PendingModifiedValues' + @cache_cluster[name] = @pending_values + @pending_values = nil + when 'CacheNodeCreateTime', 'CacheNodeStatus', 'Address', + 'ParameterGroupStatus', 'Port', 'CacheNodeId' + if @cache_node + @cache_node[name] = value ? value.strip : name + elsif @pending_values + @pending_values[name] = value ? value.strip : name + end + when 'CacheNodeIdsToReboots', 'CacheParameterGroupName', 'ParameterApplyStatus' + @cache_cluster['CacheParameterGroup'][name] = value + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/describe_cache_clusters.rb b/lib/fog/aws/parsers/elasticache/describe_cache_clusters.rb new file mode 100644 index 0000000000..56a57bac9f --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/describe_cache_clusters.rb @@ -0,0 +1,27 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/cache_cluster_parser' + + class DescribeCacheClusters < CacheClusterParser + + def reset + super + @response['CacheClusters'] = [] + end + + def end_element(name) + case name + when 'CacheCluster' + @response["#{name}s"] << @cache_cluster + reset_cache_cluster + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/describe_cache_parameters.rb b/lib/fog/aws/parsers/elasticache/describe_cache_parameters.rb new file mode 100644 index 0000000000..28131ff7dd --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/describe_cache_parameters.rb @@ -0,0 +1,22 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/engine_defaults_parser' + + class DescribeCacheParameters < EngineDefaultsParser + + def end_element(name) + case name + when 'DescribeCacheParametersResult' + @response[name] = @engine_defaults + reset_engine_defaults + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/describe_engine_default_parameters.rb b/lib/fog/aws/parsers/elasticache/describe_engine_default_parameters.rb new file mode 100644 index 0000000000..90393671aa --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/describe_engine_default_parameters.rb @@ -0,0 +1,22 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/engine_defaults_parser' + + class DescribeEngineDefaultParameters < EngineDefaultsParser + + def end_element(name) + case name + when 'EngineDefaults' + @response[name] = @engine_defaults + reset_engine_defaults + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/describe_parameter_groups.rb b/lib/fog/aws/parsers/elasticache/describe_parameter_groups.rb new file mode 100644 index 0000000000..81040b7463 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/describe_parameter_groups.rb @@ -0,0 +1,27 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/parameter_group_parser' + + class DescribeParameterGroups < ParameterGroupParser + + def reset + super + @response['CacheParameterGroups'] = [] + end + + def end_element(name) + case name + when 'CacheParameterGroup' + @response["#{name}s"] << @parameter_group + reset_parameter_group + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/describe_security_groups.rb b/lib/fog/aws/parsers/elasticache/describe_security_groups.rb new file mode 100644 index 0000000000..70942a4e15 --- /dev/null +++ 
b/lib/fog/aws/parsers/elasticache/describe_security_groups.rb @@ -0,0 +1,27 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/security_group_parser' + + class DescribeSecurityGroups < SecurityGroupParser + + def reset + super + @response['CacheSecurityGroups'] = [] + end + + def end_element(name) + case name + when 'CacheSecurityGroup' + @response["#{name}s"] << @security_group + reset_security_group + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/engine_defaults_parser.rb b/lib/fog/aws/parsers/elasticache/engine_defaults_parser.rb new file mode 100644 index 0000000000..7ec652a5b9 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/engine_defaults_parser.rb @@ -0,0 +1,59 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/base' + + class EngineDefaultsParser < Base + + def reset + super + reset_engine_defaults + end + + def reset_engine_defaults + @engine_defaults = { + 'CacheNodeTypeSpecificParameters' => [], + 'Parameters' => [], + } + end + + def start_element(name, attrs = []) + case name + when 'CacheNodeTypeSpecificParameter', 'Parameter' + @parameter = {} + when 'CacheNodeTypeSpecificValues' + @parameter[name] = [] + when 'CacheNodeTypeSpecificValue' + @node_specific_value = {} + else + super + end + end + + def end_element(name) + case name + when 'CacheParameterGroupFamily' + @engine_defaults[name] = value + when 'CacheNodeTypeSpecificParameter', 'Parameter' + if not @parameter.empty? + @engine_defaults["#{name}s"] << @parameter + end + when 'AllowedValues', 'DataType', 'Description', 'IsModifiable', + 'MinimumEngineVersion', 'ParameterName', 'Source' + @parameter[name] = value + when 'CacheNodeType', 'Value' + @node_specific_value[name] = value + when 'CacheNodeTypeSpecificValue' + if not @node_specific_value.empty? + @parameter["#{name}s"] << @node_specific_value + end + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/event_list.rb b/lib/fog/aws/parsers/elasticache/event_list.rb new file mode 100644 index 0000000000..87d932fc19 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/event_list.rb @@ -0,0 +1,38 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/base' + + class EventListParser < Base + + def reset + super + @response['Events'] = [] + end + + def start_element(name, attrs = []) + super + case name + when 'Event'; then @event = {} + end + + end + + def end_element(name) + case name + when 'Date' + @event[name] = DateTime.parse(value.strip) + when 'Message', 'SourceIdentifier', 'SourceType' + @event[name] = value ? value.strip : name + when 'Event' + @response['Events'] << @event unless @event.empty? 
+ else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/modify_parameter_group.rb b/lib/fog/aws/parsers/elasticache/modify_parameter_group.rb new file mode 100644 index 0000000000..b15aa05869 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/modify_parameter_group.rb @@ -0,0 +1,27 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/parameter_group_parser' + + class ModifyParameterGroup < ParameterGroupParser + + def reset + super + @response['ModifyCacheParameterGroupResult'] = [] + end + + def end_element(name) + case name + when 'ModifyCacheParameterGroupResult' + @response[name] = @parameter_group + reset_parameter_group + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/parameter_group_parser.rb b/lib/fog/aws/parsers/elasticache/parameter_group_parser.rb new file mode 100644 index 0000000000..d53f5e0f57 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/parameter_group_parser.rb @@ -0,0 +1,30 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/base' + + class ParameterGroupParser < Base + + def reset + super + reset_parameter_group + end + + def reset_parameter_group + @parameter_group = {} + end + + def end_element(name) + case name + when 'Description', 'CacheParameterGroupName', 'CacheParameterGroupFamily' + @parameter_group[name] = value + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/reset_parameter_group.rb b/lib/fog/aws/parsers/elasticache/reset_parameter_group.rb new file mode 100644 index 0000000000..9090e9be20 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/reset_parameter_group.rb @@ -0,0 +1,27 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/parameter_group_parser' + + class ResetParameterGroup < ParameterGroupParser + + def reset + super + @response['ResetCacheParameterGroupResult'] = [] + end + + def end_element(name) + case name + when 'ResetCacheParameterGroupResult' + @response[name] = @parameter_group + reset_parameter_group + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/security_group_parser.rb b/lib/fog/aws/parsers/elasticache/security_group_parser.rb new file mode 100644 index 0000000000..b33ec87e1d --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/security_group_parser.rb @@ -0,0 +1,40 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/base' + + class SecurityGroupParser < Fog::Parsers::Base + + def reset + super + reset_security_group + end + + def reset_security_group + @security_group = {'EC2SecurityGroups' => []} + end + + def start_element(name, attrs = []) + super + case name + when 'EC2SecurityGroup'; then @ec2_group = {} + end + + end + + def end_element(name) + case name + when 'Description', 'CacheSecurityGroupName', 'OwnerId' + @security_group[name] = value + when 'EC2SecurityGroup' + @security_group["#{name}s"] << @ec2_group unless @ec2_group.empty? 
+ when 'EC2SecurityGroupName', 'EC2SecurityGroupOwnerId', 'Status' + @ec2_group[name] = value + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/single_cache_cluster.rb b/lib/fog/aws/parsers/elasticache/single_cache_cluster.rb new file mode 100644 index 0000000000..49673c7423 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/single_cache_cluster.rb @@ -0,0 +1,22 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/cache_cluster_parser' + + class SingleCacheCluster < CacheClusterParser + + def end_element(name) + case name + when 'CacheCluster' + @response[name] = @cache_cluster + reset_cache_cluster + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/single_parameter_group.rb b/lib/fog/aws/parsers/elasticache/single_parameter_group.rb new file mode 100644 index 0000000000..ad92002080 --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/single_parameter_group.rb @@ -0,0 +1,22 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/parameter_group_parser' + + class SingleParameterGroup < ParameterGroupParser + + def end_element(name) + case name + when 'CacheParameterGroup' + @response[name] = @parameter_group + reset_parameter_group + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/parsers/elasticache/single_security_group.rb b/lib/fog/aws/parsers/elasticache/single_security_group.rb new file mode 100644 index 0000000000..5d80778b0e --- /dev/null +++ b/lib/fog/aws/parsers/elasticache/single_security_group.rb @@ -0,0 +1,34 @@ +module Fog + module Parsers + module AWS + module Elasticache + require 'fog/aws/parsers/elasticache/security_group_parser' + + class SingleSecurityGroup < SecurityGroupParser + + def reset + super + @response = { 'ResponseMetadata' => {} } + end + + def start_element(name, attrs = []) + super + end + + def end_element(name) + case name + when 'CacheSecurityGroup' + @response[name] = @security_group + reset_security_group + + when 'RequestId' + @response['ResponseMetadata'][name] = value + else + super + end + end + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_formation/create_stack.rb b/lib/fog/aws/requests/cloud_formation/create_stack.rb index ffd39cada0..4fa524b27f 100644 --- a/lib/fog/aws/requests/cloud_formation/create_stack.rb +++ b/lib/fog/aws/requests/cloud_formation/create_stack.rb @@ -17,6 +17,8 @@ class Real # * NotificationARNs<~Array>: List of SNS topics to publish events to # * Parameters<~Hash>: Hash of providers to supply to template # * TimeoutInMinutes<~Integer>: Minutes to wait before status is set to CREATE_FAILED + # * Capabilities<~Array>: List of capabilties the stack is granted. 
Currently CAPABILITY_IAM + # for allowing the creation of IAM resources # # ==== Returns # * response<~Excon::Response>: @@ -58,6 +60,10 @@ def create_stack(stack_name, options = {}) if options['TimeoutInMinutes'] params['TimeoutInMinutes'] = options['TimeoutInMinutes'] end + + if options['Capabilities'] + params.merge!(Fog::AWS.indexed_param("Capabilities.member", [*options['Capabilities']])) + end request({ 'Action' => 'CreateStack', diff --git a/lib/fog/aws/requests/cloud_formation/update_stack.rb b/lib/fog/aws/requests/cloud_formation/update_stack.rb new file mode 100644 index 0000000000..d54473cbb4 --- /dev/null +++ b/lib/fog/aws/requests/cloud_formation/update_stack.rb @@ -0,0 +1,62 @@ +module Fog + module AWS + class CloudFormation + class Real + + require 'fog/aws/parsers/cloud_formation/update_stack' + + # Update a stack + # + # ==== Parameters + # * stack_name<~String>: name of the stack to update + # * options<~Hash>: + # * TemplateBody<~String>: structure containing the template body + # or (one of the two Template parameters is required) + # * TemplateURL<~String>: URL of file containing the template body + # * Parameters<~Hash>: Hash of providers to supply to template + # * Capabilities<~Array>: List of capabilties the stack is granted. Currently CAPABILITY_IAM + # for allowing the creation of IAM resources + # + # ==== Returns + # * response<~Excon::Response>: + # * body<~Hash>: + # * 'StackId'<~String> - Id of the stack being updated + # + # ==== See Also + # http://docs.amazonwebservices.com/AWSCloudFormation/latest/APIReference/API_UpdateStack.html + # + def update_stack(stack_name, options = {}) + params = { + 'StackName' => stack_name, + } + + if options['Parameters'] + options['Parameters'].keys.each_with_index do |key, index| + index += 1 # params are 1-indexed + params.merge!({ + "Parameters.member.#{index}.ParameterKey" => key, + "Parameters.member.#{index}.ParameterValue" => options['Parameters'][key] + }) + end + end + + if options['TemplateBody'] + params['TemplateBody'] = options['TemplateBody'] + elsif options['TemplateURL'] + params['TemplateURL'] = options['TemplateURL'] + end + + if options['Capabilities'] + params.merge!(Fog::AWS.indexed_param("Capabilities.member", [*options['Capabilities']])) + end + + request({ + 'Action' => 'UpdateStack', + :parser => Fog::Parsers::AWS::CloudFormation::UpdateStack.new + }.merge!(params)) + end + + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/delete_alarms.rb b/lib/fog/aws/requests/cloud_watch/delete_alarms.rb new file mode 100644 index 0000000000..f94ef58f2f --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/delete_alarms.rb @@ -0,0 +1,30 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/delete_alarms' + + # Delete a list of alarms + # ==== Options + # * AlarmNames<~Array>: A list of alarms to be deleted + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/index.html?API_DeleteAlarms.html + # + + def delete_alarms(alarm_names) + options = {} + options.merge!(AWS.indexed_param('AlarmNames.member.%d', [*alarm_names])) + request({ + 'Action' => 'DeleteAlarms', + :parser => Fog::Parsers::AWS::CloudWatch::DeleteAlarms.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/describe_alarm_history.rb b/lib/fog/aws/requests/cloud_watch/describe_alarm_history.rb new file mode 100644 index 0000000000..fc20f96290 --- 
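# A brief sketch of the CAPABILITY_IAM support added to create_stack and the
# new update_stack request; the template path and parameter names are
# illustrative:
#
#   cf = Fog::AWS::CloudFormation.new(
#     :aws_access_key_id     => 'YOUR_KEY',
#     :aws_secret_access_key => 'YOUR_SECRET'
#   )
#
#   cf.create_stack('my-stack',
#     'TemplateBody' => File.read('template.json'),
#     'Parameters'   => {'KeyName' => 'fog_default'},
#     'Capabilities' => ['CAPABILITY_IAM']   # needed when the template creates IAM resources
#   )
#
#   cf.update_stack('my-stack',
#     'TemplateBody' => File.read('template.json'),
#     'Capabilities' => ['CAPABILITY_IAM']
#   )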
/dev/null +++ b/lib/fog/aws/requests/cloud_watch/describe_alarm_history.rb @@ -0,0 +1,33 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/describe_alarm_history' + + # Retrieves history for the specified alarm + # ==== Options + # * AlarmName<~String>: The name of the alarm + # * EndDate<~DateTime>: The ending date to retrieve alarm history + # * HistoryItemType<~String>: The type of alarm histories to retrieve + # * MaxRecords<~Integer>: The maximum number of alarm history records to retrieve + # * NextToken<~String> The token returned by a previous call to indicate that there is more data available + # * StartData<~DateTime>: The starting date to retrieve alarm history + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/index.html?API_DescribeAlarmHistory.html + # + + def describe_alarm_history(options={}) + request({ + 'Action' => 'DescribeAlarmHistory', + :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarmHistory.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/describe_alarms.rb b/lib/fog/aws/requests/cloud_watch/describe_alarms.rb new file mode 100644 index 0000000000..b1e78892d8 --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/describe_alarms.rb @@ -0,0 +1,38 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/describe_alarms' + + # Retrieves alarms with the specified names + # ==== Options + # * ActionPrefix<~String>: The action name prefix + # * AlarmNamePrefix<~String>: The alarm name prefix. + # AlarmNames cannot be specified if this parameter is specified + # * AlarmNames<~Array>: A list of alarm names to retrieve information for. 
+ # * MaxRecords<~Integer>: The maximum number of alarm descriptions to retrieve + # * NextToken<~String>: The token returned by a previous call to indicate that there is more data available + # * NextToken<~String> The token returned by a previous call to indicate that there is more data available + # * StateValue<~String>: The state value to be used in matching alarms + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_DescribeAlarms.html + # + + def describe_alarms(options={}) + if alarm_names = options.delete('AlarmNames') + options.merge!(AWS.indexed_param('AlarmNames.member.%d', [*alarm_names])) + end + request({ + 'Action' => 'DescribeAlarms', + :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarms.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/describe_alarms_for_metric.rb b/lib/fog/aws/requests/cloud_watch/describe_alarms_for_metric.rb new file mode 100644 index 0000000000..db85c5958a --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/describe_alarms_for_metric.rb @@ -0,0 +1,39 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/describe_alarms_for_metric' + + # Retrieves all alarms for a single metric + # ==== Options + # * Dimensions<~Array>: a list of dimensions to filter against + # Name : The name of the dimension + # Value : The value to filter against + # * MetricName<~String>: The name of the metric + # * Namespace<~String>: The namespace of the metric + # * Period<~Integer>: The period in seconds over which the statistic is applied + # * Statistics<~String>: The statistic for the metric + # * Unit<~String> The unit for the metric + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_DescribeAlarms.html + # + + def describe_alarms_for_metric(options) + if dimensions = options.delete('Dimensions') + options.merge!(AWS.indexed_param('Dimensions.member.%d.Name', dimensions.collect {|dimension| dimension['Name']})) + options.merge!(AWS.indexed_param('Dimensions.member.%d.Value', dimensions.collect {|dimension| dimension['Value']})) + end + request({ + 'Action' => 'DescribeAlarmsForMetric', + :parser => Fog::Parsers::AWS::CloudWatch::DescribeAlarmsForMetric.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/disable_alarm_actions.rb b/lib/fog/aws/requests/cloud_watch/disable_alarm_actions.rb new file mode 100644 index 0000000000..453ba4997b --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/disable_alarm_actions.rb @@ -0,0 +1,30 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/disable_alarm_actions' + + # Disables actions for the specified alarms + # ==== Options + # * AlarmNames<~Array>: The names of the alarms to disable actions for + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_DisableAlarmActions.html + # + + def disable_alarm_actions(alarm_names) + options = {} + options.merge!(AWS.indexed_param('AlarmNames.member.%d', [*alarm_names])) + request({ + 'Action' => 'DisableAlarmActions', + :parser => Fog::Parsers::AWS::CloudWatch::DisableAlarmActions.new + }.merge(options)) + end + end + end + end +end diff --git 
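# A short sketch of the new alarm query and action requests, reusing the
# hypothetical cloudwatch connection from the earlier sketch; option names
# follow the documentation above and values are illustrative:
#
#   cloudwatch.describe_alarms('AlarmNames' => ['cpu-high'])
#
#   cloudwatch.describe_alarms_for_metric(
#     'Namespace'  => 'AWS/EC2',
#     'MetricName' => 'CPUUtilization',
#     'Dimensions' => [{'Name' => 'InstanceId', 'Value' => 'i-00000000'}]
#   )
#
#   cloudwatch.disable_alarm_actions(['cpu-high'])   # e.g. silence actions during a deploy
#   cloudwatch.enable_alarm_actions(['cpu-high'])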
a/lib/fog/aws/requests/cloud_watch/enable_alarm_actions.rb b/lib/fog/aws/requests/cloud_watch/enable_alarm_actions.rb new file mode 100644 index 0000000000..2c7f81529b --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/enable_alarm_actions.rb @@ -0,0 +1,30 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/enable_alarm_actions' + + # Enables actions for the specified alarms + # ==== Options + # * AlarmNames<~Array>: The names of the alarms to enable actions for + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_EnableAlarmActions.html + # + + def enable_alarm_actions(alarm_names) + options = {} + options.merge!(AWS.indexed_param('AlarmNames.member.%d', [*alarm_names])) + request({ + 'Action' => 'EnableAlarmActions', + :parser => Fog::Parsers::AWS::CloudWatch::EnableAlarmActions.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/put_metric_alarm.rb b/lib/fog/aws/requests/cloud_watch/put_metric_alarm.rb new file mode 100644 index 0000000000..5ea7e20759 --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/put_metric_alarm.rb @@ -0,0 +1,84 @@ +module Fog + module AWS + class CloudWatch + class Real + require 'fog/aws/parsers/cloud_watch/put_metric_alarm' + + # Creates or updates an alarm and associates it with the specified Amazon CloudWatch metric + # ==== Options + # * ActionsEnabled<~Boolean>: Indicates whether or not actions should be executed during any changes to the alarm's state + # * AlarmActions<~Array>: A list of actions to execute + # * AlarmDescription<~String>: The description for the alarm + # * AlarmName<~String> The unique name for the alarm + # * ComparisonOperator<~String>: The arithmetic operation to use for comparison + # * Dimensions<~Array>: a list of dimensions to filter against, + # Name : The name of the dimension + # Value : The value to filter against + # * EvaluationPeriods<~Integer>: The number of periods over which data is compared to the specified threshold + # * InsufficientDataActions<~Array>: A list of actions to execute + # * MetricName<~String>: The name for the alarm's associated metric + # * Namespace<~String>: The namespace for the alarm's associated metric + # * OKActions<~Array>: A list of actions to execute + # * Period<~Integer>: The period in seconds over which the specified statistic is applied + # * Statistic<~String>: The statistic to apply to the alarm's associated metric + # * Threshold<~Double>: The value against which the specified statistic is compared + # * Unit<~String>: The unit for the alarm's associated metric + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_PutMetricAlarm.html + # + def put_metric_alarm(options) + if dimensions = options.delete('Dimensions') + options.merge!(AWS.indexed_param('Dimensions.member.%d.Name', dimensions.collect {|dimension| dimension['Name']})) + options.merge!(AWS.indexed_param('Dimensions.member.%d.Value', dimensions.collect {|dimension| dimension['Value']})) + end + if alarm_actions = options.delete('AlarmActions') + options.merge!(AWS.indexed_param('AlarmActions.member.%d', [*alarm_actions])) + end + if insufficient_data_actions = options.delete('InsufficientDataActions') + options.merge!(AWS.indexed_param('InsufficientDataActions.member.%d', [*insufficient_data_actions])) + end + if ok_actions = 
options.delete('OKActions') + options.merge!(AWS.indexed_param('OKActions.member.%d', [*ok_actions])) + end + + request({ + 'Action' => 'PutMetricAlarm', + :parser => Fog::Parsers::AWS::CloudWatch::PutMetricAlarm.new + }.merge(options)) + end + end + + class Mock + require 'fog/aws/parsers/cloud_watch/put_metric_alarm' + + # See: Fog::AWS::CloudWatch::Real.put_metric_alarm() + # + def put_metric_alarm(options) + supported_actions = [ "InsufficientDataActions", "OKActions", "AlarmActions" ] + found_actions = options.keys.select {|key| supported_actions.include? key } + if found_actions.empty? + raise Fog::Compute::AWS::Error.new("The request must contain at least one of #{supported_actions.join(", ")}'") + end + + requirements = [ "AlarmName", "ComparisonOperator", "EvaluationPeriods", "Namespace", "Period", "Statistic", "Threshold" ] + requirements.each do |req| + unless options.has_key?(req) + raise Fog::Compute::AWS::Error.new("The request must contain a the parameter '%s'" % req) + end + end + + response = Excon::Response.new + response.status = 200 + response.body = { + 'requestId' => Fog::AWS::Mock.request_id + } + response + end + end + end + end +end diff --git a/lib/fog/aws/requests/cloud_watch/set_alarm_state.rb b/lib/fog/aws/requests/cloud_watch/set_alarm_state.rb new file mode 100644 index 0000000000..cd004b005e --- /dev/null +++ b/lib/fog/aws/requests/cloud_watch/set_alarm_state.rb @@ -0,0 +1,31 @@ +module Fog + module AWS + class CloudWatch + class Real + + require 'fog/aws/parsers/cloud_watch/set_alarm_state' + + # Temporarily sets the state of an alarm + # ==== Options + # * AlarmName<~String>: The names of the alarm + # * StateReason<~String>: The reason that this alarm is set to this specific state (in human-readable text format) + # * StateReasonData<~String>: The reason that this alarm is set to this specific state (in machine-readable JSON format) + # * StateValue<~String>: The value of the state + # + # ==== Returns + # * response<~Excon::Response>: + # + # ==== See Also + # http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_SetAlarmState.html + # + + def set_alarm_state(options) + request({ + 'Action' => 'SetAlarmState', + :parser => Fog::Parsers::AWS::CloudWatch::SetAlarmState.new + }.merge(options)) + end + end + end + end +end diff --git a/lib/fog/aws/requests/compute/authorize_security_group_ingress.rb b/lib/fog/aws/requests/compute/authorize_security_group_ingress.rb index 7b0d03a139..04d3ae717c 100644 --- a/lib/fog/aws/requests/compute/authorize_security_group_ingress.rb +++ b/lib/fog/aws/requests/compute/authorize_security_group_ingress.rb @@ -13,11 +13,23 @@ class Real # * 'SourceSecurityGroupName'<~String> - Name of security group to authorize # * 'SourceSecurityGroupOwnerId'<~String> - Name of owner to authorize # or - # * 'CidrIp' - CIDR range - # * 'FromPort' - Start of port range (or -1 for ICMP wildcard) - # * 'GroupName' - Name of group to modify - # * 'IpProtocol' - Ip protocol, must be in ['tcp', 'udp', 'icmp'] - # * 'ToPort' - End of port range (or -1 for ICMP wildcard) + # * 'CidrIp'<~String> - CIDR range + # * 'FromPort'<~Integer> - Start of port range (or -1 for ICMP wildcard) + # * 'IpProtocol'<~String> - Ip protocol, must be in ['tcp', 'udp', 'icmp'] + # * 'ToPort'<~Integer> - End of port range (or -1 for ICMP wildcard) + # or + # * 'IpPermissions'<~Array>: + # * permission<~Hash>: + # * 'FromPort'<~Integer> - Start of port range (or -1 for ICMP wildcard) + # * 'Groups'<~Array>: + # * group<~Hash>: + # * 
'GroupName'<~String> - Name of security group to authorize + # * 'UserId'<~String> - Name of owner to authorize + # * 'IpProtocol'<~String> - Ip protocol, must be in ['tcp', 'udp', 'icmp'] + # * 'IpRanges'<~Array>: + # * ip_range<~Hash>: + # * 'CidrIp'<~String> - CIDR range + # * 'ToPort'<~Integer> - End of port range (or -1 for ICMP wildcard) # # === Returns # * response<~Excon::Response>: @@ -28,10 +40,15 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-AuthorizeSecurityGroupIngress.html] def authorize_security_group_ingress(group_name, options = {}) if group_name.is_a?(Hash) - Fog::Logger.warning("Fog::AWS::Compute#authorize_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::AWS::Compute#authorize_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") options = group_name - group_name = options['GroupName'] + group_name = options.delete('GroupName') + end + + if ip_permissions = options.delete('IpPermissions') + options.merge!(indexed_ip_permissions_params(ip_permissions)) end + request({ 'Action' => 'AuthorizeSecurityGroupIngress', 'GroupName' => group_name, @@ -40,51 +57,69 @@ def authorize_security_group_ingress(group_name, options = {}) }.merge!(options)) end + private + + def indexed_ip_permissions_params(ip_permissions) + params = {} + ip_permissions.each_with_index do |permission, key_index| + key_index += 1 + params[format('IpPermissions.%d.IpProtocol', key_index)] = permission['IpProtocol'] + params[format('IpPermissions.%d.FromPort', key_index)] = permission['FromPort'] + params[format('IpPermissions.%d.ToPort', key_index)] = permission['ToPort'] + (permission['Groups'] || []).each_with_index do |group, group_index| + group_index += 1 + params[format('IpPermissions.%d.Groups.%d.UserId', key_index, group_index)] = group['UserId'] + params[format('IpPermissions.%d.Groups.%d.GroupName', key_index, group_index)] = group['GroupName'] + params[format('IpPermissions.%d.Groups.%d.GroupId', key_index, group_index)] = group['GroupId'] + end + (permission['IpRanges'] || []).each_with_index do |ip_range, range_index| + range_index += 1 + params[format('IpPermissions.%d.IpRanges.%d.CidrIp', key_index, range_index)] = ip_range['CidrIp'] + end + end + params.reject {|k, v| v.nil? } + end + end class Mock def authorize_security_group_ingress(group_name, options = {}) if group_name.is_a?(Hash) - Fog::Logger.warning("Fog::AWS::Compute#authorize_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::AWS::Compute#authorize_security_group_ingress now requires the 'group_name' parameter. 
Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") options = group_name - group_name = options['GroupName'] + group_name = options.delete('GroupName') end + verify_permission_options(options) + response = Excon::Response.new group = self.data[:security_groups][group_name] if group - group['ipPermissions'] ||= [] - if group_name && source_group_name = options['SourceSecurityGroupName'] - ['tcp', 'udp'].each do |protocol| - group['ipPermissions'] << { - 'groups' => [{'groupName' => source_group_name, 'userId' => (options['SourceSecurityGroupOwnerId'] || self.data[:owner_id]) }], - 'fromPort' => 1, - 'ipRanges' => [], - 'ipProtocol' => protocol, - 'toPort' => 65535 - } + normalized_permissions = normalize_permissions(options) + + normalized_permissions.each do |permission| + if matching_group_permission = find_matching_permission(group, permission) + if permission['groups'].any? {|pg| matching_group_permission['groups'].include?(pg) } + raise Fog::Compute::AWS::Error, "InvalidPermission.Duplicate => The permission '123' has already been authorized in the specified group" + end + + if permission['ipRanges'].any? {|pr| matching_group_permission['ipRanges'].include?(pr) } + raise Fog::Compute::AWS::Error, "InvalidPermission.Duplicate => The permission '123' has already been authorized in the specified group" + end end - group['ipPermissions'] << { - 'groups' => [{'groupName' => source_group_name, 'userId' => (options['SourceSecurityGroupOwnerId'] || self.data[:owner_id]) }], - 'fromPort' => -1, - 'ipRanges' => [], - 'ipProtocol' => 'icmp', - 'toPort' => -1 - } - else - group['ipPermissions'] << { - 'groups' => [], - 'fromPort' => options['FromPort'], - 'ipRanges' => [], - 'ipProtocol' => options['IpProtocol'], - 'toPort' => options['ToPort'] - } - if options['CidrIp'] - group['ipPermissions'].last['ipRanges'] << { 'cidrIp' => options['CidrIp'] } + end + + normalized_permissions.each do |permission| + if matching_group_permission = find_matching_permission(group, permission) + matching_group_permission['groups'] += permission['groups'] + matching_group_permission['ipRanges'] += permission['ipRanges'] + else + group['ipPermissions'] << permission end end + response.status = 200 response.body = { 'requestId' => Fog::AWS::Mock.request_id, @@ -96,6 +131,76 @@ def authorize_security_group_ingress(group_name, options = {}) end end + private + + def verify_permission_options(options) + if options.empty? + raise Fog::Compute::AWS::Error.new("InvalidRequest => The request received was invalid.") + end + if options['IpProtocol'] && !['tcp', 'udp', 'icmp'].include?(options['IpProtocol']) + raise Fog::Compute::AWS::Error.new("InvalidPermission.Malformed => Unsupported IP protocol \"#{options['IpProtocol']}\" - supported: [tcp, udp, icmp]") + end + if options['IpProtocol'] && (!options['FromPort'] || !options['ToPort']) + raise Fog::Compute::AWS::Error.new("InvalidPermission.Malformed => TCP/UDP port (-1) out of range") + end + if options.has_key?('IpPermissions') + if !options['IpPermissions'].is_a?(Array) || options['IpPermissions'].empty? 
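          # Editor's note (not part of the patch): a non-Array or empty 'IpPermissions'
          # value is rejected here as InvalidRequest; otherwise each nested permission
          # hash is validated recursively through verify_permission_options below.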
+ raise Fog::Compute::AWS::Error.new("InvalidRequest => The request received was invalid.") + end + options['IpPermissions'].each {|p| verify_permission_options(p) } + end + end + + def normalize_permissions(options) + normalized_permissions = [] + + if options['SourceSecurityGroupName'] + ['tcp', 'udp'].each do |protocol| + normalized_permissions << { + 'ipProtocol' => protocol, + 'fromPort' => 1, + 'toPort' => 65535, + 'groups' => [{'groupName' => options['SourceSecurityGroupName'], 'userId' => options['SourceSecurityGroupOwnerId'] || self.data[:owner_id]}], + 'ipRanges' => [] + } + end + normalized_permissions << { + 'ipProtocol' => 'icmp', + 'fromPort' => -1, + 'toPort' => -1, + 'groups' => [{'groupName' => options['SourceSecurityGroupName'], 'userId' => options['SourceSecurityGroupOwnerId'] || self.data[:owner_id]}], + 'ipRanges' => [] + } + elsif options['CidrIp'] + normalized_permissions << { + 'ipProtocol' => options['IpProtocol'], + 'fromPort' => Integer(options['FromPort']), + 'toPort' => Integer(options['ToPort']), + 'groups' => [], + 'ipRanges' => [{'cidrIp' => options['CidrIp']}] + } + elsif options['IpPermissions'] + options['IpPermissions'].each do |permission| + normalized_permissions << { + 'ipProtocol' => permission['IpProtocol'], + 'fromPort' => Integer(permission['FromPort']), + 'toPort' => Integer(permission['ToPort']), + 'groups' => (permission['Groups'] || []).map {|g| {'groupName' => g['GroupName'], 'userId' => g['UserId'] || self.data[:owner_id]} }, + 'ipRanges' => (permission['IpRanges'] || []).map {|r| { 'cidrIp' => r['CidrIp'] } } + } + end + end + + normalized_permissions + end + + def find_matching_permission(group, permission) + group['ipPermissions'].detect {|group_permission| + permission['ipProtocol'] == group_permission['ipProtocol'] && + permission['fromPort'] == group_permission['fromPort'] && + permission['toPort'] == group_permission['toPort'] } + end + end end end diff --git a/lib/fog/aws/requests/compute/create_security_group.rb b/lib/fog/aws/requests/compute/create_security_group.rb index 0ace0ff64b..998ebd629f 100644 --- a/lib/fog/aws/requests/compute/create_security_group.rb +++ b/lib/fog/aws/requests/compute/create_security_group.rb @@ -35,10 +35,11 @@ def create_security_group(name, description) response = Excon::Response.new unless self.data[:security_groups][name] data = { - 'groupDescription' => description, - 'groupName' => name, - 'ipPermissions' => [], - 'ownerId' => self.data[:owner_id] + 'groupDescription' => description, + 'groupName' => name, + 'ipPermissionsEgress' => [], + 'ipPermissions' => [], + 'ownerId' => self.data[:owner_id] } self.data[:security_groups][name] = data response.body = { diff --git a/lib/fog/aws/requests/compute/create_snapshot.rb b/lib/fog/aws/requests/compute/create_snapshot.rb index 4c465125c9..1e7caa63e6 100644 --- a/lib/fog/aws/requests/compute/create_snapshot.rb +++ b/lib/fog/aws/requests/compute/create_snapshot.rb @@ -33,13 +33,13 @@ def create_snapshot(volume_id, description = nil) end class Mock - + # # Usage # # AWS[:compute].create_snapshot("vol-f7c23423", "latest snapshot") # - + def create_snapshot(volume_id, description = nil) response = Excon::Response.new if volume = self.data[:volumes][volume_id] @@ -59,7 +59,6 @@ def create_snapshot(volume_id, description = nil) response.body = { 'requestId' => Fog::AWS::Mock.request_id }.merge!(data) - self.data[:snapshots][snapshot_id]['tagSet'] = {} else response.status = 400 raise(Excon::Errors.status_error({:expects => 200}, response)) diff --git 
a/lib/fog/aws/requests/compute/create_tags.rb b/lib/fog/aws/requests/compute/create_tags.rb index c15d495747..2c47846823 100644 --- a/lib/fog/aws/requests/compute/create_tags.rb +++ b/lib/fog/aws/requests/compute/create_tags.rb @@ -53,7 +53,7 @@ def create_tags(resources, tags) when /^vol\-[a-z0-9]{8}$/i 'volume' end - if type && self.data[:"#{type}s"][resource_id] + if type && ((type == 'image' && visible_images[resource_id]) || self.data[:"#{type}s"][resource_id]) { 'resourceId' => resource_id, 'resourceType' => type } else raise(Fog::Service::NotFound.new("The #{type} ID '#{resource_id}' does not exist")) @@ -64,8 +64,10 @@ def create_tags(resources, tags) self.data[:tags][key] ||= {} self.data[:tags][key][value] ||= [] self.data[:tags][key][value] |= tagged - - tagged.each {|resource| self.data[:"#{resource['resourceType']}s"][resource['resourceId']]['tagSet'][key] = value} + + tagged.each do |resource| + self.data[:tag_sets][resource['resourceId']][key] = value + end end response = Excon::Response.new diff --git a/lib/fog/aws/requests/compute/create_volume.rb b/lib/fog/aws/requests/compute/create_volume.rb index 6c6e594ccd..84a0935f4c 100644 --- a/lib/fog/aws/requests/compute/create_volume.rb +++ b/lib/fog/aws/requests/compute/create_volume.rb @@ -60,7 +60,6 @@ def create_volume(availability_zone, size, snapshot_id = nil) 'size' => size, 'snapshotId' => snapshot_id, 'status' => 'creating', - 'tagSet' => {}, 'volumeId' => volume_id } self.data[:volumes][volume_id] = data diff --git a/lib/fog/aws/requests/compute/delete_security_group.rb b/lib/fog/aws/requests/compute/delete_security_group.rb index 0d70aad7be..41cd1a3b1b 100644 --- a/lib/fog/aws/requests/compute/delete_security_group.rb +++ b/lib/fog/aws/requests/compute/delete_security_group.rb @@ -30,8 +30,33 @@ def delete_security_group(name) class Mock def delete_security_group(name) + if name == 'default' + raise Fog::Compute::AWS::Error.new("InvalidGroup.Reserved => The security group 'default' is reserved") + end + response = Excon::Response.new if self.data[:security_groups][name] + + used_by_groups = [] + self.region_data.each do |access_key, key_data| + key_data[:security_groups].each do |group_name, group| + next if group == self.data[:security_groups][name] + + group['ipPermissions'].each do |group_ip_permission| + group_ip_permission['groups'].each do |group_group_permission| + if group_group_permission['groupName'] == name && + group_group_permission['userId'] == self.data[:owner_id] + used_by_groups << "#{key_data[:owner_id]}:#{group_name}" + end + end + end + end + end + + unless used_by_groups.empty? + raise Fog::Compute::AWS::Error.new("InvalidGroup.InUse => Group #{self.data[:owner_id]}:#{name} is used by groups: #{used_by_groups.uniq.join(" ")}") + end + self.data[:security_groups].delete(name) response.status = 200 response.body = { diff --git a/lib/fog/aws/requests/compute/delete_tags.rb b/lib/fog/aws/requests/compute/delete_tags.rb index 0f38c843f7..83ad0673df 100644 --- a/lib/fog/aws/requests/compute/delete_tags.rb +++ b/lib/fog/aws/requests/compute/delete_tags.rb @@ -28,7 +28,7 @@ def delete_tags(resources, tags) index += 1 # should start at 1 instead of 0 params.merge!("Tag.#{index}.Key" => key) unless tags[key].nil? 
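          # Editor's note (not part of the patch): the change below replaces Hash#merge,
          # which builds a new hash and discards the result, with Hash#merge!, which
          # updates params in place so the Tag.N.Value parameter is actually sent.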
- params.merge("Tag.#{index}.Value" => tags[key]) + params.merge!("Tag.#{index}.Value" => tags[key]) end end @@ -53,7 +53,7 @@ def delete_tags(resources, tags) when /^vol\-[a-z0-9]{8}$/i 'volume' end - if type && self.data[:"#{type}s"][resource_id] + if type && ((type == 'image' && visible_images[resource_id]) || self.data[:"#{type}s"][resource_id]) { 'resourceId' => resource_id, 'resourceType' => type } else raise(Fog::Service::NotFound.new("The #{type} ID '#{resource_id}' does not exist")) @@ -65,9 +65,8 @@ def delete_tags(resources, tags) end tagged.each do |resource| - object = self.data[:"#{resource['resourceType']}s"][resource['resourceId']] tags.each do |key, value| - tagset = object['tagSet'] + tagset = self.data[:tag_sets][resource['resourceId']] tagset.delete(key) if tagset.has_key?(key) && (value.nil? || tagset[key] == value) end end diff --git a/lib/fog/aws/requests/compute/describe_addresses.rb b/lib/fog/aws/requests/compute/describe_addresses.rb index 266cb749a8..14f9605436 100644 --- a/lib/fog/aws/requests/compute/describe_addresses.rb +++ b/lib/fog/aws/requests/compute/describe_addresses.rb @@ -21,7 +21,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeAddresses.html] def describe_addresses(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_addresses with #{filters.class} param is deprecated, use describe_addresses('public-ip' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_addresses with #{filters.class} param is deprecated, use describe_addresses('public-ip' => []) instead [light_black](#{caller.first})[/]") filters = {'public-ip' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ -38,7 +38,7 @@ class Mock def describe_addresses(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_addresses with #{filters.class} param is deprecated, use describe_addresses('public-ip' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_addresses with #{filters.class} param is deprecated, use describe_addresses('public-ip' => []) instead [light_black](#{caller.first})[/]") filters = {'public-ip' => [*filters]} end diff --git a/lib/fog/aws/requests/compute/describe_availability_zones.rb b/lib/fog/aws/requests/compute/describe_availability_zones.rb index 6ea9339f3c..e578d30df9 100644 --- a/lib/fog/aws/requests/compute/describe_availability_zones.rb +++ b/lib/fog/aws/requests/compute/describe_availability_zones.rb @@ -22,7 +22,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeAvailabilityZones.html] def describe_availability_zones(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_availability_zones with #{filters.class} param is deprecated, use describe_availability_zones('zone-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_availability_zones with #{filters.class} param is deprecated, use describe_availability_zones('zone-name' => []) instead [light_black](#{caller.first})[/]") filters = {'public-ip' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ -39,7 +39,7 @@ class Mock def describe_availability_zones(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_availability_zones with #{filters.class} param is deprecated, use describe_availability_zones('zone-name' => []) instead 
[light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_availability_zones with #{filters.class} param is deprecated, use describe_availability_zones('zone-name' => []) instead [light_black](#{caller.first})[/]") filters = {'public-ip' => [*filters]} end diff --git a/lib/fog/aws/requests/compute/describe_images.rb b/lib/fog/aws/requests/compute/describe_images.rb index 7d770da3ff..8b4f9219ef 100644 --- a/lib/fog/aws/requests/compute/describe_images.rb +++ b/lib/fog/aws/requests/compute/describe_images.rb @@ -60,7 +60,7 @@ class Mock def describe_images(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_images with #{filters.class} param is deprecated, use describe_images('image-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_images with #{filters.class} param is deprecated, use describe_images('image-id' => []) instead [light_black](#{caller.first})[/]") filters = {'image-id' => [*filters]} end @@ -98,15 +98,12 @@ def describe_images(filters = {}) 'virtualization-type' => 'virtualizationType' } - image_set = self.data[:images].values + image_set = visible_images.values + image_set = apply_tag_filters(image_set, filters, 'imageId') for filter_key, filter_value in filters - if tag_key = filter_key.split('tag:')[1] - image_set = image_set.reject{|image| ![*filter_value].include?(image['tagSet'][tag_key])} - else - aliased_key = aliases[filter_key] - image_set = image_set.reject{|image| ![*filter_value].include?(image[aliased_key])} - end + aliased_key = aliases[filter_key] + image_set = image_set.reject{|image| ![*filter_value].include?(image[aliased_key])} end image_set = image_set.map do |image| @@ -116,7 +113,7 @@ def describe_images(filters = {}) image['imageState'] = 'available' end end - image.reject { |key, value| ['registered'].include?(key) } + image.reject { |key, value| ['registered'].include?(key) }.merge('tagSet' => self.data[:tag_sets][image['imageId']]) end response.status = 200 diff --git a/lib/fog/aws/requests/compute/describe_instances.rb b/lib/fog/aws/requests/compute/describe_instances.rb index 580072ecae..302d63d3b8 100644 --- a/lib/fog/aws/requests/compute/describe_instances.rb +++ b/lib/fog/aws/requests/compute/describe_instances.rb @@ -55,7 +55,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html] def describe_instances(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_instances with #{filters.class} param is deprecated, use describe_instances('instance-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_instances with #{filters.class} param is deprecated, use describe_instances('instance-id' => []) instead [light_black](#{caller.first})[/]") filters = {'instance-id' => [*filters]} end params = {} @@ -81,14 +81,14 @@ class Mock def describe_instances(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_instances with #{filters.class} param is deprecated, use describe_instances('instance-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_instances with #{filters.class} param is deprecated, use describe_instances('instance-id' => []) instead [light_black](#{caller.first})[/]") filters = {'instance-id' => [*filters]} end response = Excon::Response.new instance_set = self.data[:instances].values - instance_set = apply_tag_filters(instance_set, filters) + instance_set = 
apply_tag_filters(instance_set, filters, 'instanceId') aliases = { 'architecture' => 'architecture', @@ -154,13 +154,26 @@ def describe_instances(filters = {}) end end + brand_new_instances = instance_set.find_all do |instance| + instance['instanceState']['name'] == 'pending' && + Time.now - instance['launchTime'] < Fog::Mock.delay * 2 + end + + # Error if filtering for a brand new instance directly + if (filters['instance-id'] || filters['instanceId']) && !brand_new_instances.empty? + raise Fog::Compute::AWS::NotFound.new("The instance ID '#{brand_new_instances.first['instanceId']}' does not exist") + end + + # Otherwise don't include it in the list + instance_set = instance_set.reject {|instance| brand_new_instances.include?(instance) } + response.status = 200 reservation_set = {} instance_set.each do |instance| case instance['instanceState']['name'] when 'pending' - if Time.now - instance['launchTime'] >= Fog::Mock.delay + if Time.now - instance['launchTime'] >= Fog::Mock.delay * 2 instance['ipAddress'] = Fog::AWS::Mock.ip_address instance['originalIpAddress'] = instance['ipAddress'] instance['dnsName'] = Fog::AWS::Mock.dns_name_for(instance['ipAddress']) @@ -192,7 +205,7 @@ def describe_instances(filters = {}) 'ownerId' => instance['ownerId'], 'reservationId' => instance['reservationId'] } - reservation_set[instance['reservationId']]['instancesSet'] << instance.reject{|key,value| !['amiLaunchIndex', 'architecture', 'blockDeviceMapping', 'clientToken', 'dnsName', 'imageId', 'instanceId', 'instanceState', 'instanceType', 'ipAddress', 'kernelId', 'keyName', 'launchTime', 'monitoring', 'placement', 'platform', 'privateDnsName', 'privateIpAddress', 'productCodes', 'ramdiskId', 'reason', 'rootDeviceType', 'stateReason', 'tagSet'].include?(key)} + reservation_set[instance['reservationId']]['instancesSet'] << instance.reject{|key,value| !['amiLaunchIndex', 'architecture', 'blockDeviceMapping', 'clientToken', 'dnsName', 'imageId', 'instanceId', 'instanceState', 'instanceType', 'ipAddress', 'kernelId', 'keyName', 'launchTime', 'monitoring', 'placement', 'platform', 'privateDnsName', 'privateIpAddress', 'productCodes', 'ramdiskId', 'reason', 'rootDeviceType', 'stateReason'].include?(key)}.merge('tagSet' => self.data[:tag_sets][instance['instanceId']]) end end diff --git a/lib/fog/aws/requests/compute/describe_key_pairs.rb b/lib/fog/aws/requests/compute/describe_key_pairs.rb index cdef398b78..4744d64493 100644 --- a/lib/fog/aws/requests/compute/describe_key_pairs.rb +++ b/lib/fog/aws/requests/compute/describe_key_pairs.rb @@ -21,7 +21,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeKeyPairs.html] def describe_key_pairs(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_key_pairs with #{filters.class} param is deprecated, use describe_key_pairs('key-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_key_pairs with #{filters.class} param is deprecated, use describe_key_pairs('key-name' => []) instead [light_black](#{caller.first})[/]") filters = {'key-name' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ -38,7 +38,7 @@ class Mock def describe_key_pairs(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_key_pairs with #{filters.class} param is deprecated, use describe_key_pairs('key-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_key_pairs with #{filters.class} param is 
deprecated, use describe_key_pairs('key-name' => []) instead [light_black](#{caller.first})[/]") filters = {'key-name' => [*filters]} end diff --git a/lib/fog/aws/requests/compute/describe_regions.rb b/lib/fog/aws/requests/compute/describe_regions.rb index 4a8780d320..56289b4e7b 100644 --- a/lib/fog/aws/requests/compute/describe_regions.rb +++ b/lib/fog/aws/requests/compute/describe_regions.rb @@ -21,7 +21,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeRegions.html] def describe_regions(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_regions with #{filters.class} param is deprecated, use describe_regions('region-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_regions with #{filters.class} param is deprecated, use describe_regions('region-name' => []) instead [light_black](#{caller.first})[/]") filters = {'region-name' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ -38,7 +38,7 @@ class Mock def describe_regions(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_regions with #{filters.class} param is deprecated, use describe_regions('region-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_regions with #{filters.class} param is deprecated, use describe_regions('region-name' => []) instead [light_black](#{caller.first})[/]") filters = {'region-name' => [*filters]} end diff --git a/lib/fog/aws/requests/compute/describe_reserved_instances.rb b/lib/fog/aws/requests/compute/describe_reserved_instances.rb index 2b913e2f3c..c871afad79 100644 --- a/lib/fog/aws/requests/compute/describe_reserved_instances.rb +++ b/lib/fog/aws/requests/compute/describe_reserved_instances.rb @@ -29,7 +29,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeReservedInstances.html] def describe_reserved_instances(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_reserved_instances with #{filters.class} param is deprecated, use describe_reserved_instances('reserved-instances-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_reserved_instances with #{filters.class} param is deprecated, use describe_reserved_instances('reserved-instances-id' => []) instead [light_black](#{caller.first})[/]") filters = {'reserved-instances-id' => [*filters]} end params = Fog::AWS.indexed_filters(filters) diff --git a/lib/fog/aws/requests/compute/describe_security_groups.rb b/lib/fog/aws/requests/compute/describe_security_groups.rb index 96cad2cfe3..8b4b057dae 100644 --- a/lib/fog/aws/requests/compute/describe_security_groups.rb +++ b/lib/fog/aws/requests/compute/describe_security_groups.rb @@ -31,7 +31,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSecurityGroups.html] def describe_security_groups(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_security_groups with #{filters.class} param is deprecated, use describe_security_groups('group-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_security_groups with #{filters.class} param is deprecated, use describe_security_groups('group-name' => []) instead [light_black](#{caller.first})[/]") filters = {'group-name' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ 
-48,7 +48,7 @@ class Mock def describe_security_groups(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_security_groups with #{filters.class} param is deprecated, use describe_security_groups('group-name' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_security_groups with #{filters.class} param is deprecated, use describe_security_groups('group-name' => []) instead [light_black](#{caller.first})[/]") filters = {'group-name' => [*filters]} end diff --git a/lib/fog/aws/requests/compute/describe_snapshots.rb b/lib/fog/aws/requests/compute/describe_snapshots.rb index 0a8c9add5c..8f4c62dc0d 100644 --- a/lib/fog/aws/requests/compute/describe_snapshots.rb +++ b/lib/fog/aws/requests/compute/describe_snapshots.rb @@ -27,11 +27,11 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSnapshots.html] def describe_snapshots(filters = {}, options = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_snapshots with #{filters.class} param is deprecated, use describe_snapshots('snapshot-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_snapshots with #{filters.class} param is deprecated, use describe_snapshots('snapshot-id' => []) instead [light_black](#{caller.first})[/]") filters = {'snapshot-id' => [*filters]} end unless options.empty? - Fog::Logger.warning("describe_snapshots with a second param is deprecated, use describe_snapshots(options) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_snapshots with a second param is deprecated, use describe_snapshots(options) instead [light_black](#{caller.first})[/]") end for key in ['ExecutableBy', 'ImageId', 'Owner', 'RestorableBy'] @@ -54,11 +54,11 @@ class Mock def describe_snapshots(filters = {}, options = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_snapshots with #{filters.class} param is deprecated, use describe_snapshots('snapshot-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_snapshots with #{filters.class} param is deprecated, use describe_snapshots('snapshot-id' => []) instead [light_black](#{caller.first})[/]") filters = {'snapshot-id' => [*filters]} end unless options.empty? 
- Fog::Logger.warning("describe_snapshots with a second param is deprecated, use describe_snapshots(options) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_snapshots with a second param is deprecated, use describe_snapshots(options) instead [light_black](#{caller.first})[/]") end response = Excon::Response.new @@ -72,7 +72,7 @@ def describe_snapshots(filters = {}, options = {}) Fog::Logger.warning("describe_snapshots with RestorableBy other than 'self' (wanted #{restorable_by.inspect}) is not mocked [light_black](#{caller.first})[/]") end - snapshot_set = apply_tag_filters(snapshot_set, filters) + snapshot_set = apply_tag_filters(snapshot_set, filters, 'snapshotId') aliases = { 'description' => 'description', @@ -106,6 +106,8 @@ def describe_snapshots(filters = {}, options = {}) end end + snapshot_set = snapshot_set.map {|snapshot| snapshot.merge('tagSet' => self.data[:tag_sets][snapshot['snapshotId']]) } + response.status = 200 response.body = { 'requestId' => Fog::AWS::Mock.request_id, diff --git a/lib/fog/aws/requests/compute/describe_volumes.rb b/lib/fog/aws/requests/compute/describe_volumes.rb index 84083867ed..e663d354b2 100644 --- a/lib/fog/aws/requests/compute/describe_volumes.rb +++ b/lib/fog/aws/requests/compute/describe_volumes.rb @@ -31,7 +31,7 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeVolumes.html] def describe_volumes(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_volumes with #{filters.class} param is deprecated, use describe_volumes('volume-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_volumes with #{filters.class} param is deprecated, use describe_volumes('volume-id' => []) instead [light_black](#{caller.first})[/]") filters = {'volume-id' => [*filters]} end params = Fog::AWS.indexed_filters(filters) @@ -48,15 +48,15 @@ class Mock def describe_volumes(filters = {}) unless filters.is_a?(Hash) - Fog::Logger.warning("describe_volumes with #{filters.class} param is deprecated, use describe_volumes('volume-id' => []) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("describe_volumes with #{filters.class} param is deprecated, use describe_volumes('volume-id' => []) instead [light_black](#{caller.first})[/]") filters = {'volume-id' => [*filters]} end response = Excon::Response.new volume_set = self.data[:volumes].values - volume_set = apply_tag_filters(volume_set, filters) - + volume_set = apply_tag_filters(volume_set, filters, 'volumeId') + aliases = { 'availability-zone' => 'availabilityZone', 'create-time' => 'createTime', @@ -102,6 +102,7 @@ def describe_volumes(filters = {}) end end volume_set = volume_set.reject {|volume| !self.data[:volumes][volume['volumeId']]} + volume_set = volume_set.map {|volume| volume.merge('tagSet' => self.data[:tag_sets][volume['volumeId']]) } response.status = 200 response.body = { diff --git a/lib/fog/aws/requests/compute/modify_image_attribute.rb b/lib/fog/aws/requests/compute/modify_image_attribute.rb index f8bd0606fb..8d8cf09dfc 100644 --- a/lib/fog/aws/requests/compute/modify_image_attribute.rb +++ b/lib/fog/aws/requests/compute/modify_image_attribute.rb @@ -20,6 +20,8 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyImageAttribute.html] # def modify_image_attribute(image_id, attributes) + raise ArgumentError.new("image_id is required") unless image_id + params 
= {} params.merge!(Fog::AWS.indexed_param('LaunchPermission.Add.%d.Group', attributes['Add.Group'] || [])) params.merge!(Fog::AWS.indexed_param('LaunchPermission.Add.%d.UserId', attributes['Add.UserId'] || [])) @@ -35,6 +37,38 @@ def modify_image_attribute(image_id, attributes) end end + + class Mock + + def modify_image_attribute(image_id, attributes) + raise ArgumentError.new("image_id is required") unless image_id + + unless self.data[:images][image_id] + raise Fog::Compute::AWS::NotFound.new("The AMI ID '#{image_id}' does not exist") + end + + (attributes['Add.UserId'] || []).each do |user_id| + if image_launch_permissions = self.data[:image_launch_permissions][image_id] + image_launch_permissions[:users].push(user_id) + end + end + + (attributes['Remove.UserId'] || []).each do |user_id| + if image_launch_permissions = self.data[:image_launch_permissions][image_id] + image_launch_permissions[:users].delete(user_id) + end + end + + response = Excon::Response.new + response.status = 200 + response.body = { + 'return' => true, + 'requestId' => Fog::AWS::Mock.request_id + } + response + end + + end end end end diff --git a/lib/fog/aws/requests/compute/modify_instance_attribute.rb b/lib/fog/aws/requests/compute/modify_instance_attribute.rb index 786af73313..e5ee0bb1e0 100644 --- a/lib/fog/aws/requests/compute/modify_instance_attribute.rb +++ b/lib/fog/aws/requests/compute/modify_instance_attribute.rb @@ -34,7 +34,7 @@ def modify_instance_attribute(instance_id, attributes) end def modify_instance_attributes(instance_id, attributes) - Fog::Logger.warning("modify_instance_attributes method is deprecated, use 'modify_instance_attribute' instead") + Fog::Logger.deprecation("modify_instance_attributes method is deprecated, use 'modify_instance_attribute' instead") modify_instance_attribute(instance_id, attributes) end diff --git a/lib/fog/aws/requests/compute/purchase_reserved_instances_offering.rb b/lib/fog/aws/requests/compute/purchase_reserved_instances_offering.rb index c5b9f8df17..ef04ea662f 100644 --- a/lib/fog/aws/requests/compute/purchase_reserved_instances_offering.rb +++ b/lib/fog/aws/requests/compute/purchase_reserved_instances_offering.rb @@ -37,7 +37,7 @@ def purchase_reserved_instances_offering(reserved_instances_offering_id, instanc # Need to implement filters in the mock to find this there instead of here # Also there's no information about what to do when the specified reserved_instances_offering_id doesn't exist - raise unless reserved_instance_offering = Compute[:aws].describe_reserved_instances_offerings.body["reservedInstancesOfferingsSet"].find { |offering| offering["reservedInstancesOfferingId"] == reserved_instances_offering_id } + raise unless reserved_instance_offering = describe_reserved_instances_offerings.body["reservedInstancesOfferingsSet"].find { |offering| offering["reservedInstancesOfferingId"] == reserved_instances_offering_id } reserved_instances_id = Fog::AWS::Mock.reserved_instances_id reserved_instance_offering.delete('reservedInstancesOfferingId') diff --git a/lib/fog/aws/requests/compute/register_image.rb b/lib/fog/aws/requests/compute/register_image.rb index 90bf7fcdb4..6a96e383c8 100644 --- a/lib/fog/aws/requests/compute/register_image.rb +++ b/lib/fog/aws/requests/compute/register_image.rb @@ -86,7 +86,6 @@ def register_image(name, description, location, block_devices=[], options={}) 'rootDeviceName' => '', 'blockDeviceMapping' => [], 'virtualizationType' => 'paravirtual', - 'tagSet' => {}, 'hypervisor' => 'xen', 'registered' => Time.now } diff --git 
a/lib/fog/aws/requests/compute/revoke_security_group_ingress.rb b/lib/fog/aws/requests/compute/revoke_security_group_ingress.rb index 6bd02abb1d..6cc119658f 100644 --- a/lib/fog/aws/requests/compute/revoke_security_group_ingress.rb +++ b/lib/fog/aws/requests/compute/revoke_security_group_ingress.rb @@ -8,15 +8,28 @@ class Real # Remove permissions from a security group # # ==== Parameters - # * 'GroupName'<~String> - Name of group + # * group_name<~String> - Name of group # * options<~Hash>: # * 'SourceSecurityGroupName'<~String> - Name of security group to authorize # * 'SourceSecurityGroupOwnerId'<~String> - Name of owner to authorize # or - # * 'CidrIp' - CIDR range - # * 'FromPort' - Start of port range (or -1 for ICMP wildcard) - # * 'IpProtocol' - Ip protocol, must be in ['tcp', 'udp', 'icmp'] - # * 'ToPort' - End of port range (or -1 for ICMP wildcard) + # * 'CidrIp'<~String> - CIDR range + # * 'FromPort'<~Integer> - Start of port range (or -1 for ICMP wildcard) + # * 'IpProtocol'<~String> - Ip protocol, must be in ['tcp', 'udp', 'icmp'] + # * 'ToPort'<~Integer> - End of port range (or -1 for ICMP wildcard) + # or + # * 'IpPermissions'<~Array>: + # * permission<~Hash>: + # * 'FromPort'<~Integer> - Start of port range (or -1 for ICMP wildcard) + # * 'Groups'<~Array>: + # * group<~Hash>: + # * 'GroupName'<~String> - Name of security group to authorize + # * 'UserId'<~String> - Name of owner to authorize + # * 'IpProtocol'<~String> - Ip protocol, must be in ['tcp', 'udp', 'icmp'] + # * 'IpRanges'<~Array>: + # * ip_range<~Hash>: + # * 'CidrIp'<~String> - CIDR range + # * 'ToPort'<~Integer> - End of port range (or -1 for ICMP wildcard) # # === Returns # * response<~Excon::Response>: @@ -27,10 +40,15 @@ class Real # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-RevokeSecurityGroupIngress.html] def revoke_security_group_ingress(group_name, options = {}) if group_name.is_a?(Hash) - Fog::Logger.warning("Fog::AWS::Compute#revoke_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::AWS::Compute#revoke_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") options = group_name - group_name = options['GroupName'] + group_name = options.delete('GroupName') + end + + if ip_permissions = options.delete('IpPermissions') + options.merge!(indexed_ip_permissions_params(ip_permissions)) end + request({ 'Action' => 'RevokeSecurityGroupIngress', 'GroupName' => group_name, @@ -45,36 +63,30 @@ class Mock def revoke_security_group_ingress(group_name, options = {}) if group_name.is_a?(Hash) - Fog::Logger.warning("Fog::AWS::Compute#revoke_security_group_ingress now requires the 'group_name' parameter. Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::AWS::Compute#revoke_security_group_ingress now requires the 'group_name' parameter. 
Only specifying an options hash is now deprecated [light_black](#{caller.first})[/]") options = group_name - group_name = options['GroupName'] + group_name = options.delete('GroupName') end + + verify_permission_options(options) + response = Excon::Response.new group = self.data[:security_groups][group_name] + if group - if source_group_name = options['SourceSecurityGroupName'] - group['ipPermissions'].delete_if do |permission| - if source_owner_id = options['SourceSecurityGroupOwnerId'] - permission['groups'].first['groupName'] == source_group_name && permission['groups'].first['userId'] == source_owner_id - else - permission['groups'].first['groupName'] == source_group_name + normalized_permissions = normalize_permissions(options) + + normalized_permissions.each do |permission| + if matching_permission = find_matching_permission(group, permission) + matching_permission['ipRanges'] -= permission['ipRanges'] + matching_permission['groups'] -= permission['groups'] + + if matching_permission['ipRanges'].empty? && matching_permission['groups'].empty? + group['ipPermissions'].delete(matching_permission) end end - else - ingress = group['ipPermissions'].select {|permission| - permission['fromPort'] == options['FromPort'] && - permission['ipProtocol'] == options['IpProtocol'] && - permission['toPort'] == options['ToPort'] && - ( - permission['ipRanges'].empty? || - ( - permission['ipRanges'].first && - permission['ipRanges'].first['cidrIp'] == options['CidrIp'] - ) - ) - }.first - group['ipPermissions'].delete(ingress) end + response.status = 200 response.body = { 'requestId' => Fog::AWS::Mock.request_id, diff --git a/lib/fog/aws/requests/compute/run_instances.rb b/lib/fog/aws/requests/compute/run_instances.rb index 939443ffda..e16c03c5d7 100644 --- a/lib/fog/aws/requests/compute/run_instances.rb +++ b/lib/fog/aws/requests/compute/run_instances.rb @@ -157,8 +157,7 @@ def run_instances(image_id, min_count, max_count, options = {}) 'ownerId' => self.data[:owner_id], 'privateIpAddress' => nil, 'reservationId' => reservation_id, - 'stateReason' => {}, - 'tagSet' => {} + 'stateReason' => {} }) end response.body = { diff --git a/lib/fog/aws/requests/elasticache/authorize_cache_security_group_ingress.rb b/lib/fog/aws/requests/elasticache/authorize_cache_security_group_ingress.rb new file mode 100644 index 0000000000..06b19d6f45 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/authorize_cache_security_group_ingress.rb @@ -0,0 +1,36 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_security_group' + + # Authorize ingress to a CacheSecurityGroup using EC2 Security Groups + # + # === Parameters + # * name <~String> - The name of the cache security group + # * ec2_name <~String> - The name of the EC2 security group to authorize + # * ec2_owner_id <~String> - The AWS Account Number of the EC2 security group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def authorize_cache_security_group_ingress(name, ec2_name, ec2_owner_id) + request({ + 'Action' => 'AuthorizeCacheSecurityGroupIngress', + 'CacheSecurityGroupName' => name, + 'EC2SecurityGroupName' => ec2_name, + 'EC2SecurityGroupOwnerId' => ec2_owner_id, + :parser => Fog::Parsers::AWS::Elasticache::SingleSecurityGroup.new + }) + end + + end + + class Mock + def authorize_cache_security_group_ingress + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/create_cache_cluster.rb 
b/lib/fog/aws/requests/elasticache/create_cache_cluster.rb new file mode 100644 index 0000000000..6ac85f7f92 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/create_cache_cluster.rb @@ -0,0 +1,62 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_cache_cluster' + # creates a cache cluster + # + # === Required Parameters + # * id <~String> - A unique cluster ID - 20 characters max. + # === Optional Parameters + # * options <~Hash> - All optional parameters should be set in this Hash: + # * :node_type <~String> - The size (flavor) of the cache Nodes + # * :security_group_names <~Array> - Array of Elasticache::SecurityGroup names + # * :num_nodes <~Integer> - The number of nodes in the Cluster + # * :auto_minor_version_upgrade <~TrueFalseClass> + # * :parameter_group_name <~String> - Name of the Cluster's ParameterGroup + # * :engine <~String> - The Cluster's caching software (memcached) + # * :engine_version <~String> - The Cluster's caching software version + # * :notification_topic_arn <~String> - Amazon SNS Resource Name + # * :port <~Integer> - The memcached port number + # * :preferred_availablility_zone <~String> + # * :preferred_maintenance_window <~String> + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def create_cache_cluster(id, options = {}) + # Construct Cache Security Group parameters in the format: + # CacheSecurityGroupNames.member.N => "security_group_name" + group_names = options[:security_group_names] || ['default'] + sec_group_params = group_names.inject({}) do |group_hash, name| + index = group_names.index(name) + 1 + group_hash["CacheSecurityGroupNames.member.#{index}"] = name + group_hash + end + # Merge the Cache Security Group parameters with the normal options + request(sec_group_params.merge( + 'Action' => 'CreateCacheCluster', + 'CacheClusterId' => id, + 'CacheNodeType' => options[:node_type] || 'cache.m1.large', + 'Engine' => options[:engine] || 'memcached', + 'NumCacheNodes' => options[:num_nodes] || 1, + 'AutoMinorVersionUpgrade' => options[:auto_minor_version_upgrade], + 'CacheParameterGroupName' => options[:parameter_group_name], + 'EngineVersion' => options[:engine_version], + 'NotificationTopicArn' => options[:notification_topic_arn], + 'Port' => options[:port], + 'PreferredAvailabilityZone' => options[:preferred_availablility_zone], + 'PreferredMaintenanceWindow' => options[:preferred_maintenance_window], + :parser => Fog::Parsers::AWS::Elasticache::SingleCacheCluster.new + )) + end + end + + class Mock + def create_cache_cluster(id, options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/create_cache_parameter_group.rb b/lib/fog/aws/requests/elasticache/create_cache_parameter_group.rb new file mode 100644 index 0000000000..cb9ffdab7b --- /dev/null +++ b/lib/fog/aws/requests/elasticache/create_cache_parameter_group.rb @@ -0,0 +1,37 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_parameter_group' + + # creates a cache parameter group + # + # === Parameters + # * name <~String> - The name for the Cache Parameter Group + # === Optional Parameters + # * description <~String> - The description for the Cache Parameter Group + # * family <~String> - The description for the Cache Parameter Group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def create_cache_parameter_group(name, description = name, family = 'memcached1.4') + request({ 
+ 'Action' => 'CreateCacheParameterGroup', + 'CacheParameterGroupName' => name, + 'Description' => description, + 'CacheParameterGroupFamily' => family, + :parser => Fog::Parsers::AWS::Elasticache::SingleParameterGroup.new + }) + end + end + + class Mock + def create_cache_parameter_group(name, description = name, + family = 'memcached1.4') + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/create_cache_security_group.rb b/lib/fog/aws/requests/elasticache/create_cache_security_group.rb new file mode 100644 index 0000000000..296218e78b --- /dev/null +++ b/lib/fog/aws/requests/elasticache/create_cache_security_group.rb @@ -0,0 +1,33 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_security_group' + + # creates a cache security group + # + # === Parameters + # * name <~String> - The name for the Cache Security Group + # * description <~String> - The description for the Cache Security Group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def create_cache_security_group(name, description = name) + request({ + 'Action' => 'CreateCacheSecurityGroup', + 'CacheSecurityGroupName' => name, + 'Description' => description, + :parser => Fog::Parsers::AWS::Elasticache::SingleSecurityGroup.new + }) + end + end + + class Mock + def create_cache_security_group(name, desciption=name) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/delete_cache_cluster.rb b/lib/fog/aws/requests/elasticache/delete_cache_cluster.rb new file mode 100644 index 0000000000..a0a967712c --- /dev/null +++ b/lib/fog/aws/requests/elasticache/delete_cache_cluster.rb @@ -0,0 +1,32 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_cache_clusters' + + # Deletes a Cache Cluster + # + # === Parameter (required): + # * id <~String> - The ID of the cache cluster to delete + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def delete_cache_cluster(cluster_id) + request( + 'Action' => 'DeleteCacheCluster', + 'CacheClusterId' => cluster_id, + :parser => Fog::Parsers::AWS::Elasticache::DescribeCacheClusters.new + ) + end + + end + + class Mock + def delete_cache_cluster(cluster_id) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/delete_cache_parameter_group.rb b/lib/fog/aws/requests/elasticache/delete_cache_parameter_group.rb new file mode 100644 index 0000000000..3704fb7416 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/delete_cache_parameter_group.rb @@ -0,0 +1,31 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/base' + + # deletes a cache parameter group + # + # === Parameters + # * name <~String> - The name for the Cache Parameter Group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def delete_cache_parameter_group(name) + request({ + 'Action' => 'DeleteCacheParameterGroup', + 'CacheParameterGroupName' => name, + :parser => Fog::Parsers::AWS::Elasticache::Base.new + }) + end + end + + class Mock + def delete_cache_parameter_group(name) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/delete_cache_security_group.rb b/lib/fog/aws/requests/elasticache/delete_cache_security_group.rb new file mode 100644 index 0000000000..840f9d5681 --- /dev/null +++ 
b/lib/fog/aws/requests/elasticache/delete_cache_security_group.rb @@ -0,0 +1,31 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/base' + + # deletes a cache security group + # + # === Parameters + # * name <~String> - The name for the Cache Security Group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def delete_cache_security_group(name) + request({ + 'Action' => 'DeleteCacheSecurityGroup', + 'CacheSecurityGroupName' => name, + :parser => Fog::Parsers::AWS::Elasticache::Base.new + }) + end + end + + class Mock + def delete_cache_security_group(name) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_cache_clusters.rb b/lib/fog/aws/requests/elasticache/describe_cache_clusters.rb new file mode 100644 index 0000000000..1c044ab3ff --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_cache_clusters.rb @@ -0,0 +1,39 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_cache_clusters' + + # Returns a list of Cache Cluster descriptions + # + # === Parameters (optional) + # * id - The ID of an existing cache cluster + # * options <~Hash> (optional): + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + # * :show_node_info <~Boolean> - whether to show node info + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def describe_cache_clusters(id = nil, options = {}) + request({ + 'Action' => 'DescribeCacheClusters', + 'CacheClusterId' => id, + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + 'ShowCacheNodeInfo' => options[:show_node_info], + :parser => Fog::Parsers::AWS::Elasticache::DescribeCacheClusters.new + }) + end + + end + + class Mock + def describe_cache_clusters(id = nil, options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_cache_parameter_groups.rb b/lib/fog/aws/requests/elasticache/describe_cache_parameter_groups.rb new file mode 100644 index 0000000000..f3423fc7ad --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_cache_parameter_groups.rb @@ -0,0 +1,34 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_parameter_groups' + + # Returns a list of CacheParameterGroup descriptions + # + # === Parameters (optional) + # * name <~String> - The name of an existing cache parameter group + # * options <~Hash> (optional): + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + def describe_cache_parameter_groups(name = nil, options = {}) + request({ + 'Action' => 'DescribeCacheParameterGroups', + 'CacheParameterGroupName' => name, + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + :parser => Fog::Parsers::AWS::Elasticache::DescribeParameterGroups.new + }.merge(options)) + end + + end + + class Mock + def describe_cache_parameter_groups(name = nil, options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_cache_parameters.rb b/lib/fog/aws/requests/elasticache/describe_cache_parameters.rb new file mode 100644 index 0000000000..99ffc72530 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_cache_parameters.rb @@ -0,0 +1,36 @@ +module Fog + module 
AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_cache_parameters' + + # Returns a list of CacheParameterGroup descriptions + # + # === Parameters (optional) + # * name <~String> - The name of an existing cache parameter group + # * options <~Hash> (optional): + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + # * :source <~String> - the parameter types to return. + def describe_cache_parameters(name = nil, options = {}) + request({ + 'Action' => 'DescribeCacheParameters', + 'CacheParameterGroupName' => name, + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + 'Source' => options[:source], + :parser => Fog::Parsers::AWS::Elasticache::DescribeCacheParameters.new + }) + end + + end + + class Mock + def describe_cache_parameters(name = nil, options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_cache_security_groups.rb b/lib/fog/aws/requests/elasticache/describe_cache_security_groups.rb new file mode 100644 index 0000000000..e09b7344f4 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_cache_security_groups.rb @@ -0,0 +1,34 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_security_groups' + + # Returns a list of CacheSecurityGroup descriptions + # + # === Parameters (optional) + # * name <~String> - The name of an existing cache security group + # * options <~Hash> (optional): + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + def describe_cache_security_groups(name = nil, options = {}) + request({ + 'Action' => 'DescribeCacheSecurityGroups', + 'CacheSecurityGroupName' => name, + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + :parser => Fog::Parsers::AWS::Elasticache::DescribeSecurityGroups.new + }.merge(options)) + end + + end + + class Mock + def describe_cache_security_groups(name = nil, options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_engine_default_parameters.rb b/lib/fog/aws/requests/elasticache/describe_engine_default_parameters.rb new file mode 100644 index 0000000000..11841505d5 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_engine_default_parameters.rb @@ -0,0 +1,35 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/describe_engine_default_parameters' + + # Returns the default engine and system parameter information + # for the specified cache engine. 
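      # Illustrative usage (editor's sketch, not part of the patch; the service
      # constructor, credentials and engine family shown are assumptions):
      #
      #   elasticache = Fog::AWS::Elasticache.new(
      #     :aws_access_key_id     => 'YOUR_AWS_ACCESS_KEY_ID',
      #     :aws_secret_access_key => 'YOUR_AWS_SECRET_ACCESS_KEY'
      #   )
      #   defaults = elasticache.describe_engine_default_parameters(
      #     :engine => 'memcached1.4'
      #   ).body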
+ # + # === Parameters (optional) + # * options <~Hash>: + # * :engine <~String> - the engine whose parameters are requested + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + def describe_engine_default_parameters(options = {}) + request({ + 'Action' => 'DescribeEngineDefaultParameters', + 'CacheParameterGroupFamily' => options[:engine] || 'memcached1.4', + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + :parser => Fog::Parsers::AWS::Elasticache::DescribeEngineDefaultParameters.new + }) + end + + end + + class Mock + def describe_engine_defalut_parameters(options = {}) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/describe_events.rb b/lib/fog/aws/requests/elasticache/describe_events.rb new file mode 100644 index 0000000000..1f5402a920 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/describe_events.rb @@ -0,0 +1,46 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/event_list' + + # Returns a list of service events + # + # === Parameters (optional) + # * options <~Hash> (optional): + # * :start_time <~DateTime> - starting time for event records + # * :end_time <~DateTime> - ending time for event records + # * :duration <~DateTime> - time span for event records + # * :marker <~String> - marker provided in the previous request + # * :max_records <~Integer> - the maximum number of records to include + # * :source_identifier <~DateTime> - identifier of the event source + # * :source_type <~DateTime> - event type, one of: + # (cache-cluster | cache-parameter-group | cache-security-group) + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def describe_events(options = {}) + request( + 'Action' => 'DescribeEvents', + 'StartTime' => options[:sart_time], + 'EndTime' => options[:end_time], + 'Duration' => options[:duration], + 'Marker' => options[:marker], + 'MaxRecords' => options[:max_records], + 'SourceIdentifier' => options[:source_identifier], + 'SourceType' => options[:source_type], + :parser => Fog::Parsers::AWS::Elasticache::EventListParser.new + ) + end + + end + + class Mock + def describe_events + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/modify_cache_cluster.rb b/lib/fog/aws/requests/elasticache/modify_cache_cluster.rb new file mode 100644 index 0000000000..318cb2a8a6 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/modify_cache_cluster.rb @@ -0,0 +1,70 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_cache_cluster' + + # Modifies an existing cache cluster + # Returns a cache cluster description + # + # === Required Parameters + # * id <~String> - The ID of the existing cluster to be modified + # === Optional Parameters + # * options <~Hash> - All optional parameters should be set in this Hash: + # * :apply_immediately <~TrueFalseClass> - whether to apply changes now + # * :auto_minor_version_upgrade <~TrueFalseClass> + # * :num_nodes <~Integer> - The number of nodes in the Cluster + # * :nodes_to_remove <~Array> - Array of node IDs to delete + # * :security_group_names <~Array> - Array of Elasticache::SecurityGroup names + # * :parameter_group_name <~String> - Name of the Cluster's ParameterGroup + # * :engine_version <~String> - The Cluster's caching software version + # * :notification_topic_arn <~String> - Amazon SNS 
Resource Name + # * :notification_topic_status <~String> - Amazon SNS Topic status + # * :preferred_maintenance_window <~String> + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def modify_cache_cluster(id, options = {}) + # Construct Cache Security Group parameters in the format: + # CacheSecurityGroupNames.member.N => "security_group_name" + group_names = options[:security_group_names] || [] + sec_group_params = group_names.inject({}) do |group_hash, name| + index = group_names.index(name) + 1 + group_hash["CacheSecurityGroupNames.member.#{index}"] = name + group_hash + end + # Construct CacheNodeIdsToRemove parameters in the format: + # CacheNodeIdsToRemove.member.N => "node_id" + node_ids = options[:nodes_to_remove] || [] + node_id_params = node_ids.inject({}) do |node_hash, node_id| + index = node_ids.index(node_id) + 1 + node_hash["CacheNodeIdsToRemove.member.#{index}"] = node_id + node_hash + end + # Merge the Cache Security Group parameters with the normal options + request(node_id_params.merge(sec_group_params.merge( + 'Action' => 'ModifyCacheCluster', + 'CacheClusterId' => id, + 'ApplyImmediately' => options[:apply_immediately], + 'NumCacheNodes' => options[:num_nodes], + 'AutoMinorVersionUpgrade' => options[:auto_minor_version_upgrade], + 'CacheParameterGroupName' => options[:parameter_group_name], + 'EngineVersion' => options[:engine_version], + 'NotificationTopicArn' => options[:notification_topic_arn], + 'NotificationTopicStatus' => options[:notification_topic_status], + 'PreferredMaintenanceWindow' => options[:preferred_maintenance_window], + :parser => Fog::Parsers::AWS::Elasticache::SingleCacheCluster.new + ))) + end + + end + + class Mock + def modify_cache_cluster + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/modify_cache_parameter_group.rb b/lib/fog/aws/requests/elasticache/modify_cache_parameter_group.rb new file mode 100644 index 0000000000..364b2eb9fe --- /dev/null +++ b/lib/fog/aws/requests/elasticache/modify_cache_parameter_group.rb @@ -0,0 +1,45 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/modify_parameter_group' + + # Modifies an existing cache parameter group + # Returns a the name of the modified parameter group + # + # === Required Parameters + # * id <~String> - The ID of the parameter group to be modified + # * new_parameters <~Hash> - The parameters to modify, and their values + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def modify_cache_parameter_group(id, new_parameters) + # Construct Parameter Modifications in the format: + # ParameterNameValues.member.N.ParameterName => "param_name" + # ParameterNameValues.member.N.ParameterValue => "param_value" + n = 0 # n is the parameter index + parameter_changes = new_parameters.inject({}) do |new_args,pair| + n += 1 + new_args["ParameterNameValues.member.#{n}.ParameterName"] = pair[0] + new_args["ParameterNameValues.member.#{n}.ParameterValue"] = pair[1] + new_args + end + # Merge the Cache Security Group parameters with the normal options + request(parameter_changes.merge( + 'Action' => 'ModifyCacheParameterGroup', + 'CacheParameterGroupName' => id, + :parser => Fog::Parsers::AWS::Elasticache::ModifyParameterGroup.new + )) + end + + end + + class Mock + def modify_cache_parameter_group(id, new_parameters) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/reboot_cache_cluster.rb 
b/lib/fog/aws/requests/elasticache/reboot_cache_cluster.rb new file mode 100644 index 0000000000..ecfb102628 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/reboot_cache_cluster.rb @@ -0,0 +1,44 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_cache_cluster' + + # Reboots some or all of an existing cache cluster's nodes + # Returns a cache cluster description + # + # === Required Parameters + # * id <~String> - The ID of the existing cluster to be rebooted + # === Optional Parameters + # * nodes_to_reboot <~Array> - Array of node IDs to reboot + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def reboot_cache_cluster(id, nodes_to_reboot) + # Construct CacheNodeIdsToReboot parameters in the format: + # CacheNodeIdsToReboot.member.N => "node_id" + node_ids = nodes_to_reboot || [] + node_id_params = node_ids.inject({}) do |node_hash, node_id| + index = node_ids.index(node_id) + 1 + node_hash["CacheNodeIdsToReboot.member.#{index}"] = node_id + node_hash + end + # Merge the CacheNodeIdsToReboot parameters with the normal options + request(node_id_params.merge( + 'Action' => 'RebootCacheCluster', + 'CacheClusterId' => id, + :parser => Fog::Parsers::AWS::Elasticache::SingleCacheCluster.new + )) + end + + end + + class Mock + def reboot_cache_cluster + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/reset_cache_parameter_group.rb b/lib/fog/aws/requests/elasticache/reset_cache_parameter_group.rb new file mode 100644 index 0000000000..6f834ded46 --- /dev/null +++ b/lib/fog/aws/requests/elasticache/reset_cache_parameter_group.rb @@ -0,0 +1,46 @@ +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/reset_parameter_group' + + # Resets an existing cache parameter group + # Returns a the name of the modified parameter group + # + # === Required Parameters + # * id <~String> - The ID of the parameter group to be modified + # === Optional Parameters + # * parameter_names <~Array> - The parameters to reset + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def reset_cache_parameter_group(id, parameter_names = []) + # Construct Parameter resets in the format: + # ParameterNameValues.member.N => "param_name" + parameter_changes = parameter_names.inject({}) do |new_args, param| + index = parameter_names.index(param) + 1 + new_args["ParameterNameValues.member.#{index}"] = param + new_args + end + if parameter_changes.empty? 
+ parameter_changes = {'ResetAllParameters' => 'true'} + end + # Merge the Cache Security Group parameters with the normal options + request(parameter_changes.merge( + 'Action' => 'ResetCacheParameterGroup', + 'CacheParameterGroupName' => id, + :parser => Fog::Parsers::AWS::Elasticache::ResetParameterGroup.new + )) + end + + end + + class Mock + def reset_cache_parameter_group(id, parameter_names) + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elasticache/revoke_cache_security_group_ingress.rb b/lib/fog/aws/requests/elasticache/revoke_cache_security_group_ingress.rb new file mode 100644 index 0000000000..7607b588ac --- /dev/null +++ b/lib/fog/aws/requests/elasticache/revoke_cache_security_group_ingress.rb @@ -0,0 +1,37 @@ + +module Fog + module AWS + class Elasticache + class Real + + require 'fog/aws/parsers/elasticache/single_security_group' + + # Revoke ingress to a CacheSecurityGroup using EC2 Security Groups + # + # === Parameters + # * name <~String> - The name of the cache security group + # * ec2_name <~String> - The name of the EC2 security group to revoke + # * ec2_owner_id <~String> - The AWS Account Number of the EC2 security group + # === Returns + # * response <~Excon::Response>: + # * body <~Hash> + def revoke_cache_security_group_ingress(name, ec2_name, ec2_owner_id) + request({ + 'Action' => 'RevokeCacheSecurityGroupIngress', + 'CacheSecurityGroupName' => name, + 'EC2SecurityGroupName' => ec2_name, + 'EC2SecurityGroupOwnerId' => ec2_owner_id, + :parser => Fog::Parsers::AWS::Elasticache::SingleSecurityGroup.new + }) + end + + end + + class Mock + def revoke_cache_security_group_ingress + Fog::Mock.not_implemented + end + end + end + end +end diff --git a/lib/fog/aws/requests/elb/create_load_balancer.rb b/lib/fog/aws/requests/elb/create_load_balancer.rb index da9a2939f2..bb5922bb1a 100644 --- a/lib/fog/aws/requests/elb/create_load_balancer.rb +++ b/lib/fog/aws/requests/elb/create_load_balancer.rb @@ -56,7 +56,7 @@ def create_load_balancer(availability_zones, lb_name, listeners = []) raise Fog::AWS::ELB::IdentifierTaken if self.data[:load_balancers].has_key? lb_name - certificate_ids = Fog::AWS::IAM.new.list_server_certificates.body['Certificates'].collect { |c| c['Arn'] } + certificate_ids = Fog::AWS::IAM::Mock.data[@aws_access_key_id][:server_certificates].map {|n, c| c['Arn'] } listeners = [*listeners].map do |listener| if listener['SSLCertificateId'] and !certificate_ids.include? listener['SSLCertificateId'] diff --git a/lib/fog/aws/requests/elb/create_load_balancer_listeners.rb b/lib/fog/aws/requests/elb/create_load_balancer_listeners.rb index 51f3219a08..1d89914dc0 100644 --- a/lib/fog/aws/requests/elb/create_load_balancer_listeners.rb +++ b/lib/fog/aws/requests/elb/create_load_balancer_listeners.rb @@ -52,7 +52,7 @@ def create_load_balancer_listeners(lb_name, listeners) if load_balancer = self.data[:load_balancers][lb_name] response = Excon::Response.new - certificate_ids = Fog::AWS::IAM.new.list_server_certificates.body['Certificates'].collect { |c| c['Arn'] } + certificate_ids = Fog::AWS::IAM::Mock.data[@aws_access_key_id][:server_certificates].map {|n, c| c['Arn'] } listeners.each do |listener| if listener['SSLCertificateId'] and !certificate_ids.include? 
listener['SSLCertificateId'] diff --git a/lib/fog/aws/requests/elb/deregister_instances_from_load_balancer.rb b/lib/fog/aws/requests/elb/deregister_instances_from_load_balancer.rb index ff786cb8ed..cc30020e24 100644 --- a/lib/fog/aws/requests/elb/deregister_instances_from_load_balancer.rb +++ b/lib/fog/aws/requests/elb/deregister_instances_from_load_balancer.rb @@ -37,7 +37,7 @@ def deregister_instances_from_load_balancer(instance_ids, lb_name) raise Fog::AWS::ELB::NotFound unless load_balancer = self.data[:load_balancers][lb_name] instance_ids = [*instance_ids] instance_ids.each do |instance| - raise Fog::AWS::ELB::InvalidInstance unless Compute[:aws].servers.get(instance) + raise Fog::AWS::ELB::InvalidInstance unless Fog::Compute::AWS::Mock.data[@region][@aws_access_key_id][:instances][instance] end response = Excon::Response.new diff --git a/lib/fog/aws/requests/elb/describe_instance_health.rb b/lib/fog/aws/requests/elb/describe_instance_health.rb index db6ec66676..f24bfd6ebf 100644 --- a/lib/fog/aws/requests/elb/describe_instance_health.rb +++ b/lib/fog/aws/requests/elb/describe_instance_health.rb @@ -40,13 +40,13 @@ def describe_instance_health(lb_name, instance_ids = []) instance_ids = [*instance_ids] instance_ids = load_balancer['Instances'].collect { |i| i['InstanceId'] } unless instance_ids.any? data = instance_ids.map do |id| - unless instance = Compute[:aws].servers.get(id) + unless Fog::Compute::AWS::Mock.data[@region][@aws_access_key_id][:instances][id] raise Fog::AWS::ELB::InvalidInstance end { 'Description' => "", - 'InstanceId' => instance.id, + 'InstanceId' => id, 'ReasonCode' => "", 'State' => 'OutOfService' } diff --git a/lib/fog/aws/requests/elb/describe_load_balancers.rb b/lib/fog/aws/requests/elb/describe_load_balancers.rb index fb4f6b377b..0c65322224 100644 --- a/lib/fog/aws/requests/elb/describe_load_balancers.rb +++ b/lib/fog/aws/requests/elb/describe_load_balancers.rb @@ -59,10 +59,10 @@ def describe_load_balancers(lb_names = []) lb_names.map do |lb_name| lb = self.data[:load_balancers].find { |name, data| name == lb_name } raise Fog::AWS::ELB::NotFound unless lb - lb[1] + lb[1].dup end.compact else - self.data[:load_balancers].values + self.data[:load_balancers].map { |lb, values| values.dup } end response = Excon::Response.new @@ -73,7 +73,7 @@ def describe_load_balancers(lb_names = []) 'RequestId' => Fog::AWS::Mock.request_id }, 'DescribeLoadBalancersResult' => { - 'LoadBalancerDescriptions' => load_balancers + 'LoadBalancerDescriptions' => load_balancers.map { |lb| lb['Instances'] = lb['Instances'].map { |i| i['InstanceId'] }; lb } } } diff --git a/lib/fog/aws/requests/elb/register_instances_with_load_balancer.rb b/lib/fog/aws/requests/elb/register_instances_with_load_balancer.rb index 85c9f9519f..ac0e2e0d8b 100644 --- a/lib/fog/aws/requests/elb/register_instances_with_load_balancer.rb +++ b/lib/fog/aws/requests/elb/register_instances_with_load_balancer.rb @@ -37,14 +37,14 @@ def register_instances_with_load_balancer(instance_ids, lb_name) raise Fog::AWS::ELB::NotFound unless load_balancer = self.data[:load_balancers][lb_name] instance_ids = [*instance_ids] instances = instance_ids.map do |instance| - raise Fog::AWS::ELB::InvalidInstance unless Compute[:aws].servers.get(instance) + raise Fog::AWS::ELB::InvalidInstance unless Fog::Compute::AWS::Mock.data[@region][@aws_access_key_id][:instances][instance] {'InstanceId' => instance} end response = Excon::Response.new response.status = 200 - load_balancer['Instances'] = instances.dup + load_balancer['Instances'] = 
load_balancer['Instances'] | instances.dup response.body = { 'ResponseMetadata' => { diff --git a/lib/fog/aws/requests/iam/get_server_certificate.rb b/lib/fog/aws/requests/iam/get_server_certificate.rb index a6b0be2043..8a0766a83b 100644 --- a/lib/fog/aws/requests/iam/get_server_certificate.rb +++ b/lib/fog/aws/requests/iam/get_server_certificate.rb @@ -18,10 +18,10 @@ class Real # ==== See Also # http://docs.amazonwebservices.com/IAM/latest/APIReference/API_GetServerCertificate.html # - def get_server_certificate(server_certificate_name) + def get_server_certificate(name) request({ 'Action' => 'GetServerCertificate', - 'ServerCertificateName' => server_certificate_name, + 'ServerCertificateName' => name, :parser => Fog::Parsers::AWS::IAM::UploadServerCertificate.new }) end @@ -29,18 +29,16 @@ def get_server_certificate(server_certificate_name) end class Mock - def get_server_certificate(server_certificate_name) - raise Fog::AWS::IAM::NotFound unless self.data[:server_certificates].key?(server_certificate_name) + def get_server_certificate(name) + raise Fog::AWS::IAM::NotFound unless certificate = self.data[:server_certificates][name] response = Excon::Response.new response.status = 200 response.body = { - 'Certificate' => self.data[:server_certificates][server_certificate_name], + 'Certificate' => certificate, 'RequestId' => Fog::AWS::Mock.request_id } - self.data[:server_certificates] - response end end diff --git a/lib/fog/aws/requests/iam/list_server_certificates.rb b/lib/fog/aws/requests/iam/list_server_certificates.rb index 1ad820e59b..267161d96d 100644 --- a/lib/fog/aws/requests/iam/list_server_certificates.rb +++ b/lib/fog/aws/requests/iam/list_server_certificates.rb @@ -40,10 +40,12 @@ def list_server_certificates(options = {}) class Mock def list_server_certificates(options = {}) + certificates = self.data[:server_certificates].values + certificates = certificates.select { |certificate| certificate['Path'] =~ Regexp.new("^#{options['PathPrefix']}") } if options['PathPrefix'] response = Excon::Response.new response.status = 200 response.body = { - 'Certificates' => self.data[:server_certificates].values + 'Certificates' => certificates } response diff --git a/lib/fog/aws/requests/iam/upload_server_certificate.rb b/lib/fog/aws/requests/iam/upload_server_certificate.rb index 83db3fe946..8357edbd4f 100644 --- a/lib/fog/aws/requests/iam/upload_server_certificate.rb +++ b/lib/fog/aws/requests/iam/upload_server_certificate.rb @@ -52,6 +52,7 @@ def upload_server_certificate(certificate, private_key, name, options = {}) # Validate cert and key begin cert = OpenSSL::X509::Certificate.new(certificate) + chain = OpenSSL::X509::Certificate.new(options['CertificateChain']) if options['CertificateChain'] key = OpenSSL::PKey::RSA.new(private_key) rescue OpenSSL::X509::CertificateError, OpenSSL::PKey::RSAError => e message = if e.is_a?(OpenSSL::X509::CertificateError) @@ -70,9 +71,9 @@ def upload_server_certificate(certificate, private_key, name, options = {}) raise Fog::AWS::IAM::EntityAlreadyExists.new else response.status = 200 - path = "server-certificates/#{name}" + path = options['path'] || "/" data = { - 'Arn' => Fog::AWS::Mock.arn('iam', self.data[:owner_id], path), + 'Arn' => Fog::AWS::Mock.arn('iam', self.data[:owner_id], "server-certificate/#{name}"), 'Path' => path, 'ServerCertificateId' => Fog::AWS::IAM::Mock.server_certificate_id, 'ServerCertificateName' => name, diff --git a/lib/fog/aws/requests/simpledb/get_attributes.rb b/lib/fog/aws/requests/simpledb/get_attributes.rb index 
6607ad0360..079cd88888 100644 --- a/lib/fog/aws/requests/simpledb/get_attributes.rb +++ b/lib/fog/aws/requests/simpledb/get_attributes.rb @@ -29,7 +29,7 @@ class Real # * 'RequestId' def get_attributes(domain_name, item_name, options = {}) if options.is_a?(Array) - Fog::Logger.warning("get_attributes with array attributes param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("get_attributes with array attributes param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") options = {'AttributeName' => options} end options['AttributeName'] ||= [] @@ -49,7 +49,7 @@ class Mock def get_attributes(domain_name, item_name, options = {}) if options.is_a?(Array) - Fog::Logger.warning("get_attributes with array attributes param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("get_attributes with array attributes param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") options['AttributeName'] ||= options if options.is_a?(Array) end options['AttributeName'] ||= [] diff --git a/lib/fog/aws/requests/simpledb/select.rb b/lib/fog/aws/requests/simpledb/select.rb index 226bd1891c..1b2c9cb975 100644 --- a/lib/fog/aws/requests/simpledb/select.rb +++ b/lib/fog/aws/requests/simpledb/select.rb @@ -23,7 +23,7 @@ class Real # * 'NextToken'<~String> - offset to start with if there are are more domains to list def select(select_expression, options = {}) if options.is_a?(String) - Fog::Logger.warning("get_attributes with string next_token param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("get_attributes with string next_token param is deprecated, use 'AttributeName' => attributes) instead [light_black](#{caller.first})[/]") options = {'NextToken' => options} end options['NextToken'] ||= nil diff --git a/lib/fog/aws/requests/sqs/change_message_visibility.rb b/lib/fog/aws/requests/sqs/change_message_visibility.rb index 2902f74047..7f34281bb8 100644 --- a/lib/fog/aws/requests/sqs/change_message_visibility.rb +++ b/lib/fog/aws/requests/sqs/change_message_visibility.rb @@ -27,6 +27,36 @@ def change_message_visibility(queue_url, receipt_handle, visibility_timeout) end end + + class Mock + + def change_message_visibility(queue_url, receipt_handle, visibility_timeout) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + message_id, _ = queue[:receipt_handles].find { |message_id, receipts| + receipts.keys.include?(receipt_handle) + } + + if message_id + queue[:messages][message_id]['Attributes']['VisibilityTimeout'] = visibility_timeout + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + } + } + response.status = 200 + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end end end diff --git a/lib/fog/aws/requests/sqs/create_queue.rb b/lib/fog/aws/requests/sqs/create_queue.rb index 610dd79467..4cdccaec9d 100644 --- a/lib/fog/aws/requests/sqs/create_queue.rb +++ b/lib/fog/aws/requests/sqs/create_queue.rb @@ -25,7 +25,41 @@ def create_queue(name, options = {}) end end - + + class Mock + def create_queue(name, options = {}) + Excon::Response.new.tap do |response| + response.status = 200 + + now = 
Time.now + queue_url = "https://queue.amazonaws.com/#{data[:owner_id]}/#{name}" + queue = { + 'QueueName' => name, + 'Attributes' => { + 'VisibilityTimeout' => 30, + 'ApproximateNumberOfMessages' => 0, + 'ApproximateNumberOfMessagesNotVisible' => 0, + 'CreatedTimestamp' => now, + 'LastModifiedTimestamp' => now, + 'QueueArn' => Fog::AWS::Mock.arn('sqs', 'us-east-1', data[:owner_id], name), + 'MaximumMessageSize' => 8192, + 'MessageRetentionPeriod' => 345600 + }, + :messages => {}, + :receipt_handles => {} + } + data[:queues][queue_url] = queue unless data[:queues][queue_url] + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + }, + 'QueueUrl' => queue_url + } + end + + end + end end end end diff --git a/lib/fog/aws/requests/sqs/delete_message.rb b/lib/fog/aws/requests/sqs/delete_message.rb index c17e65dfe9..a69b1c29f8 100644 --- a/lib/fog/aws/requests/sqs/delete_message.rb +++ b/lib/fog/aws/requests/sqs/delete_message.rb @@ -25,7 +25,36 @@ def delete_message(queue_url, receipt_handle) end end + + class Mock + def delete_message(queue_url, receipt_handle) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + message_id, _ = queue[:receipt_handles].find { |msg_id, receipts| + receipts.keys.include?(receipt_handle) + } + + if message_id + queue[:receipt_handles].delete(message_id) + queue[:messages].delete(message_id) + queue['Attributes']['LastModifiedTimestamp'] = Time.now + end + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + } + } + response.status = 200 + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end end end end diff --git a/lib/fog/aws/requests/sqs/delete_queue.rb b/lib/fog/aws/requests/sqs/delete_queue.rb index 2b5d93a279..8d122f0f0a 100644 --- a/lib/fog/aws/requests/sqs/delete_queue.rb +++ b/lib/fog/aws/requests/sqs/delete_queue.rb @@ -23,7 +23,30 @@ def delete_queue(queue_url) end end - + + class Mock + + def delete_queue(queue_url) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + response.status = 200 + + data[:queues].delete(queue_url) + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + } + } + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end + end end end diff --git a/lib/fog/aws/requests/sqs/get_queue_attributes.rb b/lib/fog/aws/requests/sqs/get_queue_attributes.rb index ff88b57ebd..75e852852a 100644 --- a/lib/fog/aws/requests/sqs/get_queue_attributes.rb +++ b/lib/fog/aws/requests/sqs/get_queue_attributes.rb @@ -25,7 +25,28 @@ def get_queue_attributes(queue_url, attribute_name) end end - + + class Mock + + def get_queue_attributes(queue_url, attribute_name) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + response.status = 200 + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + }, + 'Attributes' => queue['Attributes'] + } + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end end end end diff --git a/lib/fog/aws/requests/sqs/list_queues.rb b/lib/fog/aws/requests/sqs/list_queues.rb index 05364cc67d..9059e6f5c3 100644 --- a/lib/fog/aws/requests/sqs/list_queues.rb +++ b/lib/fog/aws/requests/sqs/list_queues.rb @@ -22,7 +22,21 @@ def list_queues(options = {}) }.merge!(options)) end end - + + class Mock + def 
list_queues(options = {}) + Excon::Response.new.tap do |response| + response.status = 200 + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + }, + 'QueueUrls' => data[:queues].keys + } + end + end + end end end end diff --git a/lib/fog/aws/requests/sqs/receive_message.rb b/lib/fog/aws/requests/sqs/receive_message.rb index b01c42a595..fa4c90366a 100644 --- a/lib/fog/aws/requests/sqs/receive_message.rb +++ b/lib/fog/aws/requests/sqs/receive_message.rb @@ -28,7 +28,61 @@ def receive_message(queue_url, options = {}) end end - + + class Mock + + def receive_message(queue_url, options = {}) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + max_number_of_messages = options['MaxNumberOfMessages'] || 1 + now = Time.now + + keys = queue[:messages].keys[0, max_number_of_messages] + + messages = queue[:messages].values_at(*keys).map do |m| + message_id = m['MessageId'] + + invisible = if (received_handles = queue[:receipt_handles][message_id]) + visibility_timeout = m['Attributes']['VisibilityTimeout'] || queue['Attributes']['VisibilityTimeout'] + received_handles.any? { |handle, time| now < time + visibility_timeout } + else + false + end + + if invisible + nil + else + receipt_handle = Fog::Mock.random_base64(300) + + queue[:receipt_handles][message_id] ||= {} + queue[:receipt_handles][message_id][receipt_handle] = now + + m['Attributes'].tap do |attrs| + attrs['ApproximateFirstReceiveTimestamp'] ||= now + attrs['ApproximateReceiveCount'] = (attrs['ApproximateReceiveCount'] || 0) + 1 + end + + m.merge({ + 'ReceiptHandle' => receipt_handle + }) + end + end.compact + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + }, + 'Message' => messages + } + response.status = 200 + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end end end end diff --git a/lib/fog/aws/requests/sqs/send_message.rb b/lib/fog/aws/requests/sqs/send_message.rb index e066d62022..90d25bae79 100644 --- a/lib/fog/aws/requests/sqs/send_message.rb +++ b/lib/fog/aws/requests/sqs/send_message.rb @@ -25,7 +25,45 @@ def send_message(queue_url, message) end end - + + class Mock + + def send_message(queue_url, message) + Excon::Response.new.tap do |response| + if (queue = data[:queues][queue_url]) + response.status = 200 + + now = Time.now + message_id = Fog::AWS::Mock.sqs_message_id + md5 = Digest::MD5.hexdigest(message) + + queue[:messages][message_id] = { + 'MessageId' => message_id, + 'Body' => message, + 'MD5OfBody' => md5, + 'Attributes' => { + 'SenderId' => Fog::AWS::Mock.sqs_message_id, + 'SentTimestamp' => now + } + } + + queue['Attributes']['LastModifiedTimestamp'] = now + + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + }, + 'MessageId' => message_id, + 'MD5OfMessageBody' => md5 + } + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + + end end end end diff --git a/lib/fog/aws/requests/sqs/set_queue_attributes.rb b/lib/fog/aws/requests/sqs/set_queue_attributes.rb index 38811bc4ce..10c7032d96 100644 --- a/lib/fog/aws/requests/sqs/set_queue_attributes.rb +++ b/lib/fog/aws/requests/sqs/set_queue_attributes.rb @@ -27,7 +27,25 @@ def set_queue_attributes(queue_url, attribute_name, attribute_value) end end - + + class Mock + def set_queue_attributes(queue_url, attribute_name, attribute_value) + Excon::Response.new.tap do |response| + if (queue = 
data[:queues][queue_url]) + response.status = 200 + queue['Attributes'][attribute_name] = attribute_value + response.body = { + 'ResponseMetadata' => { + 'RequestId' => Fog::AWS::Mock.request_id + } + } + else + response.status = 404 + raise(Excon::Errors.status_error({:expects => 200}, response)) + end + end + end + end end end end diff --git a/lib/fog/aws/requests/storage/copy_object.rb b/lib/fog/aws/requests/storage/copy_object.rb index dd7a16d557..ec510cfa6b 100644 --- a/lib/fog/aws/requests/storage/copy_object.rb +++ b/lib/fog/aws/requests/storage/copy_object.rb @@ -55,7 +55,7 @@ def copy_object(source_bucket_name, source_object_name, target_bucket_name, targ response.status = 200 target_object = source_object.dup target_object.merge!({ - 'Name' => target_object_name + 'Key' => target_object_name }) target_bucket[:objects][target_object_name] = target_object response.body = { diff --git a/lib/fog/aws/requests/storage/get_object_acl.rb b/lib/fog/aws/requests/storage/get_object_acl.rb index dbd81389e7..f4ebe103f9 100644 --- a/lib/fog/aws/requests/storage/get_object_acl.rb +++ b/lib/fog/aws/requests/storage/get_object_acl.rb @@ -59,7 +59,7 @@ def get_object_acl(bucket_name, object_name, options = {}) class Mock # :nodoc:all - def get_object_acl(bucket_name, object_name) + def get_object_acl(bucket_name, object_name, options = {}) response = Excon::Response.new if acl = self.data[:acls][:object][bucket_name] && self.data[:acls][:object][bucket_name][object_name] response.status = 200 diff --git a/lib/fog/aws/requests/storage/get_object_url.rb b/lib/fog/aws/requests/storage/get_object_url.rb index 7d700b7bb9..f3f5f67540 100644 --- a/lib/fog/aws/requests/storage/get_object_url.rb +++ b/lib/fog/aws/requests/storage/get_object_url.rb @@ -18,7 +18,7 @@ class Real # http://docs.amazonwebservices.com/AmazonS3/latest/dev/S3_QSAuth.html def get_object_url(bucket_name, object_name, expires) - Fog::Logger.warning("Fog::Storage::AWS => #get_object_url is deprecated, use #get_object_https_url instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::Storage::AWS => #get_object_url is deprecated, use #get_object_https_url instead [light_black](#{caller.first})[/]") get_object_https_url(bucket_name, object_name, expires) end @@ -27,7 +27,7 @@ def get_object_url(bucket_name, object_name, expires) class Mock # :nodoc:all def get_object_url(bucket_name, object_name, expires) - Fog::Logger.warning("Fog::Storage::AWS => #get_object_url is deprecated, use #get_object_https_url instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::Storage::AWS => #get_object_url is deprecated, use #get_object_https_url instead [light_black](#{caller.first})[/]") get_object_https_url(bucket_name, object_name, expires) end diff --git a/lib/fog/aws/requests/storage/hash_to_acl.rb b/lib/fog/aws/requests/storage/hash_to_acl.rb index 9846816409..31431f5dd2 100644 --- a/lib/fog/aws/requests/storage/hash_to_acl.rb +++ b/lib/fog/aws/requests/storage/hash_to_acl.rb @@ -15,19 +15,21 @@ def self.hash_to_acl(acl) acl['AccessControlList'].each do |grant| data << " \n" - type = case grant['Grantee'].keys.sort + case grant['Grantee'].keys.sort when ['DisplayName', 'ID'] - 'CanonicalUser' + data << " \n" + data << " #{grant['Grantee']['ID']}\n" + data << " #{grant['Grantee']['DisplayName']}\n" + data << " \n" when ['EmailAddress'] - 'AmazonCustomerByEmail' + data << " \n" + data << " #{grant['Grantee']['EmailAddress']}\n" + data << " \n" when ['URI'] - 'Group' + data << " \n" + data << " 
#{grant['Grantee']['URI']}\n" + data << " \n" end - data << " \n" - for key, value in grant['Grantee'] - data << " <#{key}>#{value}\n" - end - data << " \n" data << " #{grant['Permission']}\n" data << " \n" end @@ -40,6 +42,6 @@ def self.hash_to_acl(acl) data end end - end + end end diff --git a/lib/fog/aws/sns.rb b/lib/fog/aws/sns.rb index 572f0b68e6..4f409f0ca0 100644 --- a/lib/fog/aws/sns.rb +++ b/lib/fog/aws/sns.rb @@ -5,7 +5,7 @@ module AWS class SNS < Fog::Service requires :aws_access_key_id, :aws_secret_access_key - recognizes :host, :path, :port, :scheme, :persistent + recognizes :host, :path, :port, :scheme, :persistent, :region request_path 'fog/aws/requests/sns' request :add_permission diff --git a/lib/fog/aws/sqs.rb b/lib/fog/aws/sqs.rb index fe2a7b0048..87de2f62c2 100644 --- a/lib/fog/aws/sqs.rb +++ b/lib/fog/aws/sqs.rb @@ -19,8 +19,38 @@ class SQS < Fog::Service request :set_queue_attributes class Mock + def self.data + @data ||= Hash.new do |hash, region| + owner_id = Fog::AWS::Mock.owner_id + hash[region] = Hash.new do |region_hash, key| + region_hash[key] = { + :owner_id => Fog::AWS::Mock.owner_id, + :queues => {} + } + end + end + end + + def self.reset + @data = nil + end def initialize(options={}) + @aws_access_key_id = options[:aws_access_key_id] + + @region = options[:region] || 'us-east-1' + + unless ['ap-northeast-1', 'ap-southeast-1', 'eu-west-1', 'us-east-1', 'us-west-1'].include?(@region) + raise ArgumentError, "Unknown region: #{@region.inspect}" + end + end + + def data + self.class.data[@region][@aws_access_key_id] + end + + def reset_data + self.class.data[@region].delete(@aws_access_key_id) end end diff --git a/lib/fog/aws/storage.rb b/lib/fog/aws/storage.rb index d110e02a68..03414ebc80 100644 --- a/lib/fog/aws/storage.rb +++ b/lib/fog/aws/storage.rb @@ -76,7 +76,7 @@ def https_url(params, expires) end def url(params, expires) - Fog::Logger.warning("Fog::Storage::AWS => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::Storage::AWS => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]") https_url(params, expires) end @@ -229,7 +229,8 @@ class Real # :aws_secret_access_key in order to create a connection # # ==== Examples - # s3 = S3.new( + # s3 = Fog::Storage.new( + # :provider => "AWS", # :aws_access_key_id => your_aws_access_key_id, # :aws_secret_access_key => your_aws_secret_access_key # ) @@ -250,7 +251,11 @@ def initialize(options={}) if @endpoint = options[:endpoint] endpoint = URI.parse(@endpoint) @host = endpoint.host - @path = endpoint.path + @path = if endpoint.path.empty? 
+ '/' + else + endpoint.path + end @port = endpoint.port @scheme = endpoint.scheme else diff --git a/lib/fog/bin.rb b/lib/fog/bin.rb index b34575100f..d0292a52d4 100644 --- a/lib/fog/bin.rb +++ b/lib/fog/bin.rb @@ -4,7 +4,7 @@ module Fog class << self def available_providers - @available_providers ||= @providers.select {|provider| Kernel.const_get(provider).available?}.sort + @available_providers ||= Fog.providers.values.select {|provider| Kernel.const_get(provider).available?}.sort end end @@ -68,11 +68,13 @@ def collections require 'fog/bin/new_servers' require 'fog/bin/ninefold' require 'fog/bin/rackspace' +require 'fog/bin/openstack' require 'fog/bin/slicehost' require 'fog/bin/stormondemand' require 'fog/bin/terremark' require 'fog/bin/vcloud' require 'fog/bin/virtual_box' +require 'fog/bin/vmfusion' require 'fog/bin/vsphere' require 'fog/bin/voxel' require 'fog/bin/zerigo' diff --git a/lib/fog/bin/aws.rb b/lib/fog/bin/aws.rb index bb940c1c9a..485b6aa67f 100644 --- a/lib/fog/bin/aws.rb +++ b/lib/fog/bin/aws.rb @@ -15,6 +15,8 @@ def class_for(key) Fog::Compute::AWS when :dns Fog::DNS::AWS + when :elasticache + Fog::AWS::Elasticache when :elb Fog::AWS::ELB when :emr @@ -47,18 +49,20 @@ def [](service) when :auto_scaling Fog::AWS::AutoScaling.new when :cdn - Fog::Logger.warning("AWS[:cdn] is deprecated, use CDN[:aws] instead") + Fog::Logger.warning("AWS[:cdn] is not recommended, use CDN[:aws] for portability") Fog::CDN.new(:provider => 'AWS') when :cloud_formation Fog::AWS::CloudFormation.new when :cloud_watch Fog::AWS::CloudWatch.new when :compute - Fog::Logger.warning("AWS[:compute] is deprecated, use Compute[:aws] instead") + Fog::Logger.warning("AWS[:compute] is not recommended, use Comptue[:aws] for portability") Fog::Compute.new(:provider => 'AWS') when :dns - Fog::Logger.warning("AWS[:dns] is deprecated, use DNS[:aws] instead") + Fog::Logger.warning("AWS[:dns] is not recommended, use DNS[:aws] for portability") Fog::DNS.new(:provider => 'AWS') + when :elasticache + Fog::AWS::Elasticache.new when :elb Fog::AWS::ELB.new when :emr @@ -76,7 +80,7 @@ def [](service) when :sqs Fog::AWS::SQS.new when :storage - Fog::Logger.warning("AWS[:storage] is deprecated, use Storage[:aws] instead") + Fog::Logger.warning("AWS[:storage] is not recommended, use Storage[:aws] for portability") Fog::Storage.new(:provider => 'AWS') when :sns Fog::AWS::SNS.new diff --git a/lib/fog/bin/bluebox.rb b/lib/fog/bin/bluebox.rb index 0e24a88f5d..9a28d2a431 100644 --- a/lib/fog/bin/bluebox.rb +++ b/lib/fog/bin/bluebox.rb @@ -16,10 +16,10 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Bluebox[:compute] is deprecated, use Compute[:bluebox] instead") + Fog::Logger.warning("Bluebox[:compute] is not recommended, use Compute[:bluebox] for portability") Fog::Compute.new(:provider => 'Bluebox') when :dns - Fog::Logger.warning("Bluebox[:storage] is deprecated, use Storage[:bluebox] instead") + Fog::Logger.warning("Bluebox[:dns] is not recommended, use DNS[:bluebox] for portability") Fog::DNS.new(:provider => 'Bluebox') else raise ArgumentError, "Unrecognized service: #{service}" diff --git a/lib/fog/bin/brightbox.rb b/lib/fog/bin/brightbox.rb index e0e7490100..47b7e1607f 100644 --- a/lib/fog/bin/brightbox.rb +++ b/lib/fog/bin/brightbox.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Brightbox[:compute] is deprecated, use Compute[:brightbox] instead") + 
Fog::Logger.warning("Brightbox[:compute] is not recommended, use Compute[:brightbox] for portability") Fog::Compute.new(:provider => 'Brightbox') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/dnsimple.rb b/lib/fog/bin/dnsimple.rb index 5dd037d2a4..4de858510f 100644 --- a/lib/fog/bin/dnsimple.rb +++ b/lib/fog/bin/dnsimple.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :dns - Fog::Logger.warning("DNSimple[:dns] is deprecated, use Storage[:dnsimple] instead") + Fog::Logger.warning("DNSimple[:dns] is not recommended, use DNS[:dnsimple] for portability") Fog::DNS.new(:provider => 'DNSimple') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/dnsmadeeasy.rb b/lib/fog/bin/dnsmadeeasy.rb index 08113c9d40..a5bba033cb 100644 --- a/lib/fog/bin/dnsmadeeasy.rb +++ b/lib/fog/bin/dnsmadeeasy.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :dns - Fog::Logger.warning("DNSMadeEasy[:dns] is deprecated, use Storage[:dnsmadeeasy] instead") + Fog::Logger.warning("DNSMadeEasy[:dns] is not recommended, use DNS[:dnsmadeeasy] for portability") Fog::DNS.new(:provider => 'DNSMadeEasy') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/ecloud.rb b/lib/fog/bin/ecloud.rb index 73cdf2a7b7..ad807a8f3a 100644 --- a/lib/fog/bin/ecloud.rb +++ b/lib/fog/bin/ecloud.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Ecloud[:compute] is deprecated, use Compute[:ecloud] instead") + Fog::Logger.warning("Ecloud[:compute] is not recommended, use Compute[:ecloud] for portability") Fog::Compute.new(:provider => 'Ecloud') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/glesys.rb b/lib/fog/bin/glesys.rb index 10ab485291..83e364dac0 100644 --- a/lib/fog/bin/glesys.rb +++ b/lib/fog/bin/glesys.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Formatador.display_line("[yellow][WARN] Glesys[:compute] is deprecated, use Compute[:glesys] instead[/]") + Fog::Logger.warning("Glesys[:compute] is not recommended, use Compute[:glesys] for portability") Fog::Compute.new(:provider => 'Glesys') else raise ArgumentError, "Unrecognized service: #{service}" diff --git a/lib/fog/bin/go_grid.rb b/lib/fog/bin/go_grid.rb index efe7914d31..bea8352dcd 100644 --- a/lib/fog/bin/go_grid.rb +++ b/lib/fog/bin/go_grid.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("GoGrid[:compute] is deprecated, use Compute[:gogrid] instead") + Fog::Logger.warning("GoGrid[:compute] is not recommended, use Compute[:gogrid] for portability") Fog::Compute.new(:provider => 'GoGrid') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/google.rb b/lib/fog/bin/google.rb index 484a2a77d7..814bad8e60 100644 --- a/lib/fog/bin/google.rb +++ b/lib/fog/bin/google.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :storage - Fog::Logger.warning("Google[:storage] is deprecated, use Storage[:google] instead") + Fog::Logger.warning("Google[:storage] is not recommended, use Storage[:google] for portability") Fog::Storage.new(:provider => 'Google') else raise ArgumentError, "Unrecognized service: 
#{key.inspect}" diff --git a/lib/fog/bin/libvirt.rb b/lib/fog/bin/libvirt.rb index 4cc0fae196..6e003285a6 100644 --- a/lib/fog/bin/libvirt.rb +++ b/lib/fog/bin/libvirt.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Libvirt[:compute] is deprecated, use Compute[:libvirt] instead") + Fog::Logger.warning("Libvirt[:compute] is not recommended, use Compute[:libvirt] for portability") Fog::Compute.new(:provider => 'Libvirt') else raise ArgumentError, "Unrecognized service: #{key.inspect}" @@ -30,8 +30,8 @@ def available? availability=false rescue availability_gem=Gem.available?("ruby-libvirt") - end - + end + if availability for service in services for collection in self.class_for(service).collections diff --git a/lib/fog/bin/linode.rb b/lib/fog/bin/linode.rb index ea9d67cea0..a94349d08a 100644 --- a/lib/fog/bin/linode.rb +++ b/lib/fog/bin/linode.rb @@ -16,10 +16,10 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Linode[:compute] is deprecated, use Compute[:linode] instead") + Fog::Logger.warning("Linode[:compute] is not recommended, use Compute[:linode] for portability") Fog::Compute.new(:provider => 'Linode') when :dns - Fog::Logger.warning("Linode[:storage] is deprecated, use Storage[:linode] instead") + Fog::Logger.warning("Linode[:dns] is not recommended, use DNS[:linode] for portability") Fog::DNS.new(:provider => 'Linode') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/local.rb b/lib/fog/bin/local.rb index b8cadd62fa..7aa2d6bb6f 100644 --- a/lib/fog/bin/local.rb +++ b/lib/fog/bin/local.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :storage - Fog::Logger.warning("Local[:storage] is deprecated, use Storage[:local] instead") + Fog::Logger.warning("Local[:storage] is not recommended, use Storage[:local] for portability") Fog::Storage.new(:provider => 'Local') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/new_servers.rb b/lib/fog/bin/new_servers.rb index 3036812dc5..522c1ff316 100644 --- a/lib/fog/bin/new_servers.rb +++ b/lib/fog/bin/new_servers.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("NewServers[:compute] is deprecated, use Compute[:newservers] instead") + Fog::Logger.warning("NewServers[:compute] is not recommended, use Compute[:newservers] for portability") Fog::Compute.new(:provider => 'NewServers') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/ninefold.rb b/lib/fog/bin/ninefold.rb index 58c43d400c..3a3911295c 100644 --- a/lib/fog/bin/ninefold.rb +++ b/lib/fog/bin/ninefold.rb @@ -16,10 +16,10 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Ninefold[:compute] is deprecated, use Compute[:ninefold] instead") + Fog::Logger.warning("Ninefold[:compute] is not recommended, use Compute[:ninefold] for portability") Fog::Compute.new(:provider => 'Ninefold') when :storage - Fog::Logger.warning("Ninefold[:storage] is deprecated, use Storage[:ninefold] instead") + Fog::Logger.warning("Ninefold[:storage] is not recommended, use Storage[:ninefold] for portability") Fog::Storage.new(:provider => 'Ninefold') else raise ArgumentError, "Unrecognized service: #{service}" diff --git 
a/lib/fog/bin/openstack.rb b/lib/fog/bin/openstack.rb new file mode 100644 index 0000000000..b0a9379ccc --- /dev/null +++ b/lib/fog/bin/openstack.rb @@ -0,0 +1,31 @@ +class OpenStack < Fog::Bin + class << self + + def class_for(key) + case key + when :compute + Fog::Compute::OpenStack + else + raise ArgumentError, "Unrecognized service: #{key}" + end + end + + def [](service) + @@connections ||= Hash.new do |hash, key| + hash[key] = case key + when :compute + Fog::Logger.warning("OpenStack[:compute] is not recommended, use Compute[:rackspace] for portability") + Fog::Compute.new(:provider => 'OpenStack') + else + raise ArgumentError, "Unrecognized service: #{key.inspect}" + end + end + @@connections[service] + end + + def services + Fog::OpenStack.services + end + + end +end diff --git a/lib/fog/bin/rackspace.rb b/lib/fog/bin/rackspace.rb index b36301d11f..281c7662a2 100644 --- a/lib/fog/bin/rackspace.rb +++ b/lib/fog/bin/rackspace.rb @@ -22,17 +22,17 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :cdn - Fog::Logger.warning("Rackspace[:cdn] is deprecated, use CDN[:rackspace] instead") + Fog::Logger.warning("Rackspace[:cdn] is not recommended, use CDN[:rackspace] for portability") Fog::CDN.new(:provider => 'Rackspace') when :compute - Fog::Logger.warning("Rackspace[:compute] is deprecated, use Compute[:rackspace] instead") + Fog::Logger.warning("Rackspace[:compute] is not recommended, use Compute[:rackspace] for portability") Fog::Compute.new(:provider => 'Rackspace') when :dns Fog::DNS.new(:provider => 'Rackspace') when :load_balancers Fog::Rackspace::LoadBalancers.new when :storage - Fog::Logger.warning("Rackspace[:storage] is deprecated, use Storage[:rackspace] instead") + Fog::Logger.warning("Rackspace[:storage] is not recommended, use Storage[:rackspace] for portability") Fog::Storage.new(:provider => 'Rackspace') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/slicehost.rb b/lib/fog/bin/slicehost.rb index 6c905bba84..084fb6675c 100644 --- a/lib/fog/bin/slicehost.rb +++ b/lib/fog/bin/slicehost.rb @@ -16,7 +16,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Slicehost[:compute] is deprecated, use Compute[:slicehost] instead") + Fog::Logger.warning("Slicehost[:compute] is not recommended, use Compute[:slicehost] for portability") Fog::Compute.new(:provider => 'Slicehost') when :dns Fog::Logger.warning("Slicehost[:dns] is deprecated, use Storage[:slicehost] instead") diff --git a/lib/fog/bin/stormondemand.rb b/lib/fog/bin/stormondemand.rb index 0ec3d85bf2..23d03a9bc4 100644 --- a/lib/fog/bin/stormondemand.rb +++ b/lib/fog/bin/stormondemand.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("StormOnDemand[:compute] is deprecated, use Compute[:stormondemand] instead") + Fog::Logger.warning("StormOnDemand[:compute] is not recommended, use Compute[:stormondemand] for portability") Fog::Compute.new(:provider => 'StormOnDemand') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/virtual_box.rb b/lib/fog/bin/virtual_box.rb index 7732bb985c..9197b8254f 100644 --- a/lib/fog/bin/virtual_box.rb +++ b/lib/fog/bin/virtual_box.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("VirtualBox[:compute] is deprecated, use Compute[:virtualbox] 
instead") + Fog::Logger.warning("VirtualBox[:compute] is not recommended, use Compute[:virtualbox] for portability") Fog::Compute.new(:provider => 'VirtualBox') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/vmfusion.rb b/lib/fog/bin/vmfusion.rb new file mode 100644 index 0000000000..7ac8161b7c --- /dev/null +++ b/lib/fog/bin/vmfusion.rb @@ -0,0 +1,60 @@ +module Vmfusion # deviates from other bin stuff to accomodate gem + class << self + + def class_for(key) + case key + when :compute + Fog::Compute::Vmfusion + else + raise ArgumentError, "Unrecognized service: #{key}" + end + end + + def [](service) + @@connections ||= Hash.new do |hash, key| + hash[key] = case key + when :compute + Fog::Logger.warning("Vmfusion[:compute] is not recommended, use Compute[:vmfusion] for portability") + Fog::Compute.new(:provider => 'Vmfusion') + else + raise ArgumentError, "Unrecognized service: #{key.inspect}" + end + end + @@connections[service] + end + + def available? + begin + availability=true unless Gem::Specification::find_by_name("fission").nil? + rescue Gem::LoadError + availability=false + rescue + availability_gem=Gem.available?("fission") + end + + if availability + for service in services + for collection in self.class_for(service).collections + unless self.respond_to?(collection) + self.class_eval <<-EOS, __FILE__, __LINE__ + def self.#{collection} + self[:#{service}].#{collection} + end + EOS + end + end + end + end + availability + end + + def collections + services.map {|service| self[service].collections}.flatten.sort_by {|service| service.to_s} + end + + def services + Fog::Vmfusion.services + end + + end +end diff --git a/lib/fog/bin/voxel.rb b/lib/fog/bin/voxel.rb index 8972ccff51..a4dc218f13 100644 --- a/lib/fog/bin/voxel.rb +++ b/lib/fog/bin/voxel.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :compute - Fog::Logger.warning("Voxel[:compute] is deprecated, use Compute[:voxel] instead") + Fog::Logger.warning("Voxel[:comupte] is not recommended, use Compute[:voxel]] for portability") Fog::Compute.new(:provider => 'Voxel') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bin/zerigo.rb b/lib/fog/bin/zerigo.rb index 598c5ec5c7..2e0a6e04d2 100644 --- a/lib/fog/bin/zerigo.rb +++ b/lib/fog/bin/zerigo.rb @@ -14,7 +14,7 @@ def [](service) @@connections ||= Hash.new do |hash, key| hash[key] = case key when :dns - Fog::Logger.warning("Zerigo[:dns] is deprecated, use Storage[:zerigo] instead") + Fog::Logger.warning("Zerigo[:dns] is not recommended, use DNS[:zerigo] for portability") Fog::DNS.new(:provider => 'Zerigo') else raise ArgumentError, "Unrecognized service: #{key.inspect}" diff --git a/lib/fog/bluebox.rb b/lib/fog/bluebox.rb index f06c09789e..b59cd948ce 100644 --- a/lib/fog/bluebox.rb +++ b/lib/fog/bluebox.rb @@ -5,8 +5,8 @@ module Bluebox extend Fog::Provider - service(:compute, 'bluebox/compute') - service(:dns, 'bluebox/dns') + service(:compute, 'bluebox/compute', 'Compute') + service(:dns, 'bluebox/dns', 'DNS') end end diff --git a/lib/fog/bluebox/models/compute/server.rb b/lib/fog/bluebox/models/compute/server.rb index 83e8e85401..39e98ab812 100644 --- a/lib/fog/bluebox/models/compute/server.rb +++ b/lib/fog/bluebox/models/compute/server.rb @@ -21,7 +21,7 @@ class Server < Fog::Compute::Server attribute :storage attribute :template - attr_accessor :password, :lb_applications, :lb_services, :lb_backends + attr_accessor :hostname, :password, 
:lb_applications, :lb_services, :lb_backends attr_writer :private_key, :private_key_path, :public_key, :public_key_path, :username def initialize(attributes={}) diff --git a/lib/fog/bluebox/requests/compute/create_template.rb b/lib/fog/bluebox/requests/compute/create_template.rb index fcc9e0dba4..9cbc7a49a6 100644 --- a/lib/fog/bluebox/requests/compute/create_template.rb +++ b/lib/fog/bluebox/requests/compute/create_template.rb @@ -14,7 +14,7 @@ class Real # TODO def create_template(block_id, options={}) request( - :expects => 200, + :expects => 202, :method => 'POST', :path => "api/block_templates.json", :query => {'id' => block_id}.merge!(options) diff --git a/lib/fog/brightbox.rb b/lib/fog/brightbox.rb index c20ef4ace6..99f5f54781 100644 --- a/lib/fog/brightbox.rb +++ b/lib/fog/brightbox.rb @@ -4,7 +4,7 @@ module Fog module Brightbox extend Fog::Provider - service(:compute, 'brightbox/compute') + service(:compute, 'brightbox/compute', 'Compute') end end diff --git a/lib/fog/brightbox/compute.rb b/lib/fog/brightbox/compute.rb index 17824eb7fe..0a44191693 100644 --- a/lib/fog/brightbox/compute.rb +++ b/lib/fog/brightbox/compute.rb @@ -8,12 +8,14 @@ class Brightbox < Fog::Service API_URL = "https://api.gb1.brightbox.com/" requires :brightbox_client_id, :brightbox_secret - recognizes :brightbox_auth_url, :brightbox_api_url + recognizes :brightbox_auth_url, :brightbox_api_url, :persistent model_path 'fog/brightbox/models/compute' model :account # Singular resource, no collection collection :servers model :server + collection :server_groups + model :server_group collection :flavors model :flavor collection :images @@ -32,14 +34,19 @@ class Brightbox < Fog::Service request :add_listeners_load_balancer request :add_nodes_load_balancer request :add_servers_server_group + request :apply_to_firewall_policy request :create_api_client request :create_cloud_ip + request :create_firewall_policy + request :create_firewall_rule request :create_image request :create_load_balancer request :create_server request :create_server_group request :destroy_api_client request :destroy_cloud_ip + request :destroy_firewall_policy + request :destroy_firewall_rule request :destroy_image request :destroy_load_balancer request :destroy_server @@ -47,6 +54,8 @@ class Brightbox < Fog::Service request :get_account request :get_api_client request :get_cloud_ip + request :get_firewall_policy + request :get_firewall_rule request :get_image request :get_interface request :get_load_balancer @@ -57,6 +66,7 @@ class Brightbox < Fog::Service request :get_zone request :list_api_clients request :list_cloud_ips + request :list_firewall_policies request :list_images request :list_load_balancers request :list_server_groups @@ -106,7 +116,7 @@ def initialize(options) @connection_options = options[:connection_options] || {} @brightbox_client_id = options[:brightbox_client_id] || Fog.credentials[:brightbox_client_id] @brightbox_secret = options[:brightbox_secret] || Fog.credentials[:brightbox_secret] - @persistent = options[:peristent] || false + @persistent = options[:persistent] || false @connection = Fog::Connection.new(@api_url, @persistent, @connection_options) end diff --git a/lib/fog/brightbox/models/compute/account.rb b/lib/fog/brightbox/models/compute/account.rb index 22e1e69c40..c37f6fec5c 100644 --- a/lib/fog/brightbox/models/compute/account.rb +++ b/lib/fog/brightbox/models/compute/account.rb @@ -53,7 +53,7 @@ class Account < Fog::Model def reset_ftp_password requires :identity - 
connection.reset_ftp_password_account(identity)["library_ftp_password"] + connection.reset_ftp_password_account["library_ftp_password"] end end diff --git a/lib/fog/brightbox/models/compute/server_group.rb b/lib/fog/brightbox/models/compute/server_group.rb new file mode 100644 index 0000000000..5070a83acc --- /dev/null +++ b/lib/fog/brightbox/models/compute/server_group.rb @@ -0,0 +1,57 @@ +require 'fog/core/model' + +module Fog + module Compute + class Brightbox + + # A server group is a collection of servers + # + # Certain actions can accept a server group and affect all members + class ServerGroup < Fog::Model + + identity :id + + attribute :url + attribute :resource_type + attribute :name + attribute :description + attribute :default + + def save + requires :name + options = { + :name => name, + :description => description + }.delete_if {|k,v| v.nil? || v == "" } + data = connection.create_server_group(options) + merge_attributes(data) + true + end + + # Add a server to the server group + # + # == Parameters: + # identifiers:: + # An array of identifiers for the servers to add to the group + # + # == Returns: + # + # An excon response object representing the result + # + # ident} } + options = { + :servers => server_references + } + data = connection.add_servers_server_group(identity, options) + merge_attributes(data) + end + + end + + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/models/compute/server_groups.rb b/lib/fog/brightbox/models/compute/server_groups.rb new file mode 100644 index 0000000000..9fe3b942ae --- /dev/null +++ b/lib/fog/brightbox/models/compute/server_groups.rb @@ -0,0 +1,29 @@ +require 'fog/core/collection' +require 'fog/brightbox/models/compute/server_group' + +module Fog + module Compute + class Brightbox + + class ServerGroups < Fog::Collection + + model Fog::Compute::Brightbox::ServerGroup + + def all + data = connection.list_server_groups + load(data) + end + + def get(identifier) + return nil if identifier.nil? || identifier == "" + data = connection.get_server_group(identifier) + new(data) + rescue Excon::Errors::NotFound + nil + end + + end + + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/apply_to_firewall_policy.rb b/lib/fog/brightbox/requests/compute/apply_to_firewall_policy.rb new file mode 100644 index 0000000000..bbc5093b52 --- /dev/null +++ b/lib/fog/brightbox/requests/compute/apply_to_firewall_policy.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def apply_to_firewall_policy(identifier, options) + return nil if identifier.nil? 
|| identifier == "" + request("post", "/1.0/firewall_policies/#{identifier}/apply_to", [202], options) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/create_firewall_policy.rb b/lib/fog/brightbox/requests/compute/create_firewall_policy.rb new file mode 100644 index 0000000000..0a1c817a2a --- /dev/null +++ b/lib/fog/brightbox/requests/compute/create_firewall_policy.rb @@ -0,0 +1,13 @@ +module Fog + module Compute + class Brightbox + class Real + + def create_firewall_policy(options) + request("post", "/1.0/firewall_policies", [201], options) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/create_firewall_rule.rb b/lib/fog/brightbox/requests/compute/create_firewall_rule.rb new file mode 100644 index 0000000000..882cb1d43a --- /dev/null +++ b/lib/fog/brightbox/requests/compute/create_firewall_rule.rb @@ -0,0 +1,13 @@ +module Fog + module Compute + class Brightbox + class Real + + def create_firewall_rule(options) + request("post", "/1.0/firewall_rules", [202], options) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/destroy_firewall_policy.rb b/lib/fog/brightbox/requests/compute/destroy_firewall_policy.rb new file mode 100644 index 0000000000..71b1300389 --- /dev/null +++ b/lib/fog/brightbox/requests/compute/destroy_firewall_policy.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def destroy_firewall_policy(identifier) + return nil if identifier.nil? || identifier == "" + request("delete", "/1.0/firewall_policies/#{identifier}", [202]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/destroy_firewall_rule.rb b/lib/fog/brightbox/requests/compute/destroy_firewall_rule.rb new file mode 100644 index 0000000000..f31f995942 --- /dev/null +++ b/lib/fog/brightbox/requests/compute/destroy_firewall_rule.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def destroy_firewall_rule(identifier) + return nil if identifier.nil? || identifier == "" + request("delete", "/1.0/firewall_rules/#{identifier}", [202]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/get_firewall_policy.rb b/lib/fog/brightbox/requests/compute/get_firewall_policy.rb new file mode 100644 index 0000000000..f163e76965 --- /dev/null +++ b/lib/fog/brightbox/requests/compute/get_firewall_policy.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def get_firewall_policy(identifier) + return nil if identifier.nil? || identifier == "" + request("get", "/1.0/firewall_policies/#{identifier}", [200]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/get_firewall_rule.rb b/lib/fog/brightbox/requests/compute/get_firewall_rule.rb new file mode 100644 index 0000000000..8bbff6216c --- /dev/null +++ b/lib/fog/brightbox/requests/compute/get_firewall_rule.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def get_firewall_rule(identifier) + return nil if identifier.nil? 
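
The firewall requests follow the same request-level pattern (POST/GET/DELETE against `/1.0/firewall_policies` and `/1.0/firewall_rules`, with blank identifiers short-circuiting to nil). A sketch of the intended flow; the option keys mirror the Brightbox API and are assumptions here rather than something shown in this diff:

    policy = compute.create_firewall_policy(:name => 'default', :server_group => 'grp-xxxxx')
    rule   = compute.create_firewall_rule(:firewall_policy  => policy['id'],
                                          :protocol         => 'tcp',
                                          :destination_port => '22')

    compute.apply_to_firewall_policy(policy['id'], :server_group => 'grp-xxxxx')

    # Tear-down uses the new destroy requests (both expect a 202).
    compute.destroy_firewall_rule(rule['id'])
    compute.destroy_firewall_policy(policy['id'])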
|| identifier == "" + request("get", "/1.0/firewall_rules/#{identifier}", [200]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/list_firewall_policies.rb b/lib/fog/brightbox/requests/compute/list_firewall_policies.rb new file mode 100644 index 0000000000..455429915a --- /dev/null +++ b/lib/fog/brightbox/requests/compute/list_firewall_policies.rb @@ -0,0 +1,13 @@ +module Fog + module Compute + class Brightbox + class Real + + def list_firewall_policies + request("get", "/1.0/firewall_policies", [200]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/reset_secret_api_client.rb b/lib/fog/brightbox/requests/compute/reset_secret_api_client.rb new file mode 100644 index 0000000000..50b8a27883 --- /dev/null +++ b/lib/fog/brightbox/requests/compute/reset_secret_api_client.rb @@ -0,0 +1,14 @@ +module Fog + module Compute + class Brightbox + class Real + + def reset_secret_api_client(identifier) + return nil if identifier.nil? || identifier == "" + request("post", "/1.0/api_clients/#{identifier}/reset_secret", [200]) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/update_cloud_ip.rb b/lib/fog/brightbox/requests/compute/update_cloud_ip.rb new file mode 100644 index 0000000000..f3a66508ad --- /dev/null +++ b/lib/fog/brightbox/requests/compute/update_cloud_ip.rb @@ -0,0 +1,15 @@ +module Fog + module Compute + class Brightbox + class Real + + def update_cloud_ip(identifier, options) + return nil if identifier.nil? || identifier == "" + return nil if options.empty? || options.nil? + request("put", "/1.0/cloud_ips/#{identifier}", [200], options) + end + + end + end + end +end \ No newline at end of file diff --git a/lib/fog/brightbox/requests/compute/update_server_group.rb b/lib/fog/brightbox/requests/compute/update_server_group.rb index 0ea23f8047..e82d0f312a 100644 --- a/lib/fog/brightbox/requests/compute/update_server_group.rb +++ b/lib/fog/brightbox/requests/compute/update_server_group.rb @@ -6,7 +6,7 @@ class Real def update_server_group(identifier, options) return nil if identifier.nil? || identifier == "" return nil if options.empty? || options.nil? 
- request("put", "/1.0/server_groups/#{identifier}", [202]) + request("put", "/1.0/server_groups/#{identifier}", [202], options) end end diff --git a/lib/fog/compute.rb b/lib/fog/compute.rb index 5f950b9ce8..6f8b7fa8ba 100644 --- a/lib/fog/compute.rb +++ b/lib/fog/compute.rb @@ -38,6 +38,9 @@ def self.new(attributes) when :ninefold require 'fog/ninefold/compute' Fog::Compute::Ninefold.new(attributes) + when :openstack + require 'fog/openstack/compute' + Fog::Compute::OpenStack.new(attributes) when :rackspace require 'fog/rackspace/compute' Fog::Compute::Rackspace.new(attributes) @@ -53,6 +56,9 @@ def self.new(attributes) when :virtualbox require 'fog/virtual_box/compute' Fog::Compute::VirtualBox.new(attributes) + when :vmfusion + require 'fog/vmfusion/compute' + Fog::Compute::Vmfusion.new(attributes) when :voxel require 'fog/voxel/compute' Fog::Compute::Voxel.new(attributes) diff --git a/lib/fog/compute/models/server.rb b/lib/fog/compute/models/server.rb index 8994154507..dfaf78a318 100644 --- a/lib/fog/compute/models/server.rb +++ b/lib/fog/compute/models/server.rb @@ -13,11 +13,10 @@ def scp(local_path, remote_path, upload_options = {}) Fog::SCP.new(public_ip_address, username, scp_options).upload(local_path, remote_path, upload_options) end - def ssh(commands) + def ssh(commands, options={}) require 'net/ssh' requires :public_ip_address, :username - options = {} options[:key_data] = [private_key] if private_key Fog::SSH.new(public_ip_address, username, options).run(commands) end diff --git a/lib/fog/core/attributes.rb b/lib/fog/core/attributes.rb index cab8d6f52e..3a72dd55b5 100644 --- a/lib/fog/core/attributes.rb +++ b/lib/fog/core/attributes.rb @@ -25,9 +25,9 @@ def #{name} class_eval <<-EOS, __FILE__, __LINE__ def #{name}=(new_#{name}) attributes[:#{name}] = case new_#{name} - when 'true' + when true,'true' true - when 'false' + when false,'false' false end end @@ -71,8 +71,8 @@ def #{name}=(new_#{name}) class_eval <<-EOS, __FILE__, __LINE__ def #{name}=(new_data) if new_data.is_a?(Hash) - if new_data.has_key?(:#{squash}) - attributes[:#{name}] = new_data[:#{squash}] + if new_data.has_key?(:'#{squash}') + attributes[:#{name}] = new_data[:'#{squash}'] elsif new_data.has_key?("#{squash}") attributes[:#{name}] = new_data["#{squash}"] else @@ -142,7 +142,7 @@ def merge_attributes(new_attributes = {}) unless self.class.ignored_attributes.include?(key) if aliased_key = self.class.aliases[key] send("#{aliased_key}=", value) - elsif (public_methods | private_methods).detect {|method| ["#{key}=", :"#{key}="].include?(method)} + elsif self.respond_to?("#{key}=",true) send("#{key}=", value) else attributes[key] = value diff --git a/lib/fog/core/credentials.rb b/lib/fog/core/credentials.rb index e62bc9a798..68a1602a48 100644 --- a/lib/fog/core/credentials.rb +++ b/lib/fog/core/credentials.rb @@ -45,6 +45,11 @@ def self.credentials end end + # @return [Hash] The newly assigned credentials + def self.credentials=(new_credentials) + @credentials = new_credentials + end + def self.symbolize_credentials(args) if args.is_a? 
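
`Server#ssh` previously threw away anything the caller knew about the connection by rebuilding `options` from scratch; it now accepts an options hash, so extra `Net::SSH` settings travel through `Fog::SSH` while a model-level `private_key` is still injected as `:key_data`. A sketch, assuming `server` is any booted Fog server model:

    # Forward Net::SSH options (password, port, and so on) through Fog::SSH.
    server.ssh('uptime', :password => 'secret', :port => 2222)

    # Commands can still be a string or an array; key-based auth is unchanged.
    server.ssh(['hostname', 'uname -a'])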
Hash Hash[ *args.collect do |key, value| diff --git a/lib/fog/core/deprecation.rb b/lib/fog/core/deprecation.rb index 93f1a3fe0f..bd527b8fb4 100644 --- a/lib/fog/core/deprecation.rb +++ b/lib/fog/core/deprecation.rb @@ -4,7 +4,7 @@ module Deprecation def deprecate(older, newer) module_eval <<-EOS, __FILE__, __LINE__ def #{older}(*args) - Fog::Logger.warning("#{self} => ##{older} is deprecated, use ##{newer} instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("#{self} => ##{older} is deprecated, use ##{newer} instead [light_black](#{caller.first})[/]") send(:#{newer}, *args) end EOS @@ -13,7 +13,7 @@ def #{older}(*args) def self_deprecate(older, newer) module_eval <<-EOS, __FILE__, __LINE__ def self.#{older}(*args) - Fog::Logger.warning("#{self} => ##{older} is deprecated, use ##{newer} instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("#{self} => ##{older} is deprecated, use ##{newer} instead [light_black](#{caller.first})[/]") send(:#{newer}, *args) end EOS diff --git a/lib/fog/core/errors.rb b/lib/fog/core/errors.rb index b107bbe7a9..8b3144fc8b 100644 --- a/lib/fog/core/errors.rb +++ b/lib/fog/core/errors.rb @@ -48,6 +48,10 @@ def self.missing_credentials :new_servers_username: :public_key_path: :private_key_path: + :openstack_api_key: + :openstack_username: + :openstack_auth_url: + :openstack_tenant: :rackspace_api_key: :rackspace_username: :rackspace_servicenet: diff --git a/lib/fog/core/logger.rb b/lib/fog/core/logger.rb index 0549d9d1cc..b66ac3a84d 100644 --- a/lib/fog/core/logger.rb +++ b/lib/fog/core/logger.rb @@ -2,7 +2,8 @@ module Fog class Logger @channels = { - :warning => ::STDOUT + :deprecation => ::STDOUT, + :warning => ::STDOUT } def self.[](channel) @@ -14,11 +15,15 @@ def self.[]=(channel, value) end def self.debug(message) - self.write(:debug, "[light_black][DEBUG] #{message}[/]") + self.write(:debug, "[light_black][DEBUG] #{message}[/]\n") + end + + def self.deprecation(message) + self.write(:deprecation, "[yellow][DEPRECATION] #{message}[/]\n") end def self.warning(message) - self.write(:warning, "[yellow][WARNING] #{message}[/]") + self.write(:warning, "[yellow][WARNING] #{message}[/]\n") end def self.write(key, value) diff --git a/lib/fog/core/provider.rb b/lib/fog/core/provider.rb index 497b11548e..b345bbc48d 100644 --- a/lib/fog/core/provider.rb +++ b/lib/fog/core/provider.rb @@ -1,24 +1,34 @@ module Fog def self.providers - @providers ||= [] + @providers ||= {} + end + + def self.providers=(new_providers) + @providers = new_providers end module Provider def self.extended(base) - Fog.providers << base.to_s.split('::').last + provider = base.to_s.split('::').last + Fog.providers[provider.downcase.to_sym] = provider + end + + def [](service_key) + eval(@services_registry[service_key]).new end - def service(new_service, path) + def service(new_service, path, constant_string) Fog.services[new_service] ||= [] - Fog.services[new_service] << self.to_s.split('::').last.downcase.to_sym - self.services << new_service + Fog.services[new_service] |= [self.to_s.split('::').last.downcase.to_sym] + @services_registry ||= {} + @services_registry[new_service] = [self.to_s, constant_string].join('::') require File.join('fog', path) end def services - @services ||= [] + @services_registry.keys end end diff --git a/lib/fog/core/service.rb b/lib/fog/core/service.rb index 84dc042d27..876aba6fb7 100644 --- a/lib/fog/core/service.rb +++ b/lib/fog/core/service.rb @@ -172,7 +172,7 @@ def recognizes(*args) end def recognized - @recognized ||= [] + 
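
Deprecations get their own logger channel, so they can be silenced or redirected without touching real warnings, and `Fog.providers` is now a Hash keyed by provider symbol instead of a flat Array of names. A quick sketch of both:

    # Drop deprecation notices entirely, keep warnings on stderr.
    Fog::Logger[:deprecation] = nil
    Fog::Logger[:warning]     = ::STDERR

    # The provider registry is a Hash now.
    Fog.providers[:brightbox]   # => "Brightbox"
    Fog.providers.keys          # => [:brightbox, :libvirt, :openstack, ...] (order may vary)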
@recognized ||= [:connection_options] end def validate_options(options) diff --git a/lib/fog/dnsimple.rb b/lib/fog/dnsimple.rb index 16bba4a657..81ce6fa529 100644 --- a/lib/fog/dnsimple.rb +++ b/lib/fog/dnsimple.rb @@ -5,7 +5,7 @@ module DNSimple extend Fog::Provider - service(:dns, 'dnsimple/dns') + service(:dns, 'dnsimple/dns', 'DNS') end end diff --git a/lib/fog/dnsmadeeasy.rb b/lib/fog/dnsmadeeasy.rb index 2c0270e3a4..052f6cf55f 100644 --- a/lib/fog/dnsmadeeasy.rb +++ b/lib/fog/dnsmadeeasy.rb @@ -5,7 +5,7 @@ module DNSMadeEasy extend Fog::Provider - service(:dns, 'dnsmadeeasy/dns') + service(:dns, 'dnsmadeeasy/dns', 'DNS') end end diff --git a/lib/fog/dynect.rb b/lib/fog/dynect.rb index 0e9ff8cb00..21bd9d2387 100644 --- a/lib/fog/dynect.rb +++ b/lib/fog/dynect.rb @@ -7,7 +7,7 @@ module Fog module Dynect extend Fog::Provider - service(:dns, 'dynect/dns') + service(:dns, 'dynect/dns', 'DNS') class Mock def self.job_id diff --git a/lib/fog/ecloud.rb b/lib/fog/ecloud.rb index bddb0f15dd..3031c450e6 100644 --- a/lib/fog/ecloud.rb +++ b/lib/fog/ecloud.rb @@ -5,7 +5,7 @@ module Ecloud extend Fog::Provider - service(:compute, 'ecloud/compute') + service(:compute, 'ecloud/compute', 'Compute') end end diff --git a/lib/fog/ecloud/compute.rb b/lib/fog/ecloud/compute.rb index 9eb71c56fd..19566d9fef 100644 --- a/lib/fog/ecloud/compute.rb +++ b/lib/fog/ecloud/compute.rb @@ -18,11 +18,7 @@ def load(objects) def check_href!(opts = {}) unless href - if opts.is_a?(String) - t = Hash.new - t[:parent] = opts - opts = t - end + opts = { :parent => opts } if opts.is_a?(String) msg = ":href missing, call with a :href pointing to #{if opts[:message] opts[:message] elsif opts[:parent] @@ -1065,7 +1061,7 @@ def self.reset end def self.data_reset - Fog::Logger.warning("#{self} => #data_reset is deprecated, use #reset instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("#{self} => #data_reset is deprecated, use #reset instead [light_black](#{caller.first})[/]") self.reset end diff --git a/lib/fog/glesys.rb b/lib/fog/glesys.rb index e99229fdc4..f7d3d32a4a 100644 --- a/lib/fog/glesys.rb +++ b/lib/fog/glesys.rb @@ -5,7 +5,7 @@ module Glesys extend Fog::Provider - service(:compute, 'glesys/compute') + service(:compute, 'glesys/compute', 'Compute') end end diff --git a/lib/fog/glesys/compute.rb b/lib/fog/glesys/compute.rb index 9211f0dea0..f364e04db0 100644 --- a/lib/fog/glesys/compute.rb +++ b/lib/fog/glesys/compute.rb @@ -36,12 +36,28 @@ class Glesys < Fog::Service class Mock - def initialize(options) - Fog::Mock.not_implemented + def initialize(options={}) + @api_url = options[:glesys_api_url] || Fog.credentials[:glesys_api_url] || API_URL + @glesys_username = options[:glesys_username] || Fog.credentials[:glesys_api_key] + @glesys_api_key = options[:glesys_api_key] || Fog.credentials[:glesys_api_key] + @connection_options = options[:connection_options] || {} + end + + def self.data + @data ||= { + } + end + + def self.reset + @data = nil + end + + def data + self.class.data end - def request(method_name, options = {}) - Fog::Mock.not_implemented + def reset_data + self.class.reset end end diff --git a/lib/fog/glesys/models/compute/server.rb b/lib/fog/glesys/models/compute/server.rb index 18f9a323f4..714fc3c519 100644 --- a/lib/fog/glesys/models/compute/server.rb +++ b/lib/fog/glesys/models/compute/server.rb @@ -50,15 +50,15 @@ def save requires :hostname, :rootpw options = { - :datacenter => "Falkenberg" || datacenter, - :platform => "Xen" || platform, + :datacenter => datacenter || "Falkenberg", + 
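
Seeding `recognized` with `:connection_options` means every service accepts the option without declaring it, so Excon-level settings can be passed uniformly. A sketch against the GleSYS service touched below; the credential values are placeholders and the option names are the usual GleSYS ones, assumed here:

    compute = Fog::Compute.new(
      :provider           => 'Glesys',
      :glesys_username    => 'CL12345',
      :glesys_api_key     => 'apikey',
      :connection_options => { :connect_timeout => 5, :read_timeout => 60 }  # handed to Excon
    )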
:platform => platform || "Xen", :hostname => hostname, - :template => "Debian-6 x64" || template, - :disksize => "10" || disksize, - :memorysize => "512" || memorysize, - :cpucores => "1" || cpucores, + :template => template || "Debian-6 x64", + :disksize => disksize || "10", + :memorysize => memorysize || "512", + :cpucores => cpucores || "1", :rootpw => rootpw, - :transfer => "500" || transfer, + :transfer => transfer || "500", } data = connection.create(options) merge_attributes(data.body['response']['server']) diff --git a/lib/fog/glesys/models/compute/templates.rb b/lib/fog/glesys/models/compute/templates.rb index f846ae50a9..65abb2440e 100644 --- a/lib/fog/glesys/models/compute/templates.rb +++ b/lib/fog/glesys/models/compute/templates.rb @@ -9,12 +9,6 @@ class Templates < Fog::Collection model Fog::Glesys::Compute::Template - #attribute :platform - #attribute :name - #attribute :os - #attribute :min_mem_size - #attribute :min_disk_size - def all openvz = connection.template_list.body['response']['templates']['OpenVZ'] xen = connection.template_list.body['response']['templates']['Xen'] diff --git a/lib/fog/go_grid.rb b/lib/fog/go_grid.rb index b86f71947c..4878d2a50d 100644 --- a/lib/fog/go_grid.rb +++ b/lib/fog/go_grid.rb @@ -5,7 +5,7 @@ module GoGrid extend Fog::Provider - service(:compute, 'go_grid/compute') + service(:compute, 'go_grid/compute', 'Compute') end end diff --git a/lib/fog/go_grid/requests/compute/grid_server_power.rb b/lib/fog/go_grid/requests/compute/grid_server_power.rb index b0af6476d5..1823bebc7f 100644 --- a/lib/fog/go_grid/requests/compute/grid_server_power.rb +++ b/lib/fog/go_grid/requests/compute/grid_server_power.rb @@ -16,7 +16,7 @@ class Real def grid_server_power(server, power) request( :path => 'grid/server/power', - :query => {'server' => server} + :query => {'server' => server, 'power' => power} ) end diff --git a/lib/fog/google.rb b/lib/fog/google.rb index 2f154df7ef..2e3029679c 100644 --- a/lib/fog/google.rb +++ b/lib/fog/google.rb @@ -5,7 +5,7 @@ module Google extend Fog::Provider - service(:storage, 'google/storage') + service(:storage, 'google/storage', 'Storage') class Mock diff --git a/lib/fog/google/models/storage/file.rb b/lib/fog/google/models/storage/file.rb index 7e63e95a77..e6ec568c29 100644 --- a/lib/fog/google/models/storage/file.rb +++ b/lib/fog/google/models/storage/file.rb @@ -106,7 +106,7 @@ def public_url def save(options = {}) requires :body, :directory, :key if options != {} - Fog::Logger.warning("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]") end options['x-goog-acl'] ||= @acl if @acl options['Cache-Control'] = cache_control if cache_control diff --git a/lib/fog/google/requests/storage/get_object_url.rb b/lib/fog/google/requests/storage/get_object_url.rb index b55bd3da38..40f085aab7 100644 --- a/lib/fog/google/requests/storage/get_object_url.rb +++ b/lib/fog/google/requests/storage/get_object_url.rb @@ -18,7 +18,7 @@ class Real # http://docs.amazonwebservices.com/AmazonS3/latest/dev/S3_QSAuth.html def get_object_url(bucket_name, object_name, expires) - Fog::Logger.warning("Fog::Storage::Google => ##{get_object_url} is deprecated, use ##{get_object_https_url} instead[/] [light_black](#{caller.first})") + Fog::Logger.deprecation("Fog::Storage::Google => ##{get_object_url} is deprecated, use ##{get_object_https_url} instead[/] [light_black](#{caller.first})") get_object_https_url(bucket_name, 
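
The GleSYS defaults were written backwards: `"Falkenberg" || datacenter` always evaluates to the literal, so caller-supplied values were silently discarded; flipping the operands makes the attribute win and the literal act only as a fallback. Likewise `grid_server_power` now actually sends the requested power state instead of just the server id. A sketch of GleSYS server creation with the corrected defaults (hostname, password and datacenter are placeholders):

    server = compute.servers.create(
      :hostname   => 'node1.example.com',
      :rootpw     => 'S3cret!',
      :datacenter => 'Stockholm',   # respected now; previously forced to "Falkenberg"
      :memorysize => '1024'         # anything left unset still falls back to the defaults
    )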
object_name, expires) end @@ -27,7 +27,7 @@ def get_object_url(bucket_name, object_name, expires) class Mock # :nodoc:all def get_object_url(bucket_name, object_name, expires) - Fog::Logger.warning("Fog::Storage::Google => ##{get_object_url} is deprecated, use ##{get_object_https_url} instead[/] [light_black](#{caller.first})") + Fog::Logger.deprecation("Fog::Storage::Google => ##{get_object_url} is deprecated, use ##{get_object_https_url} instead[/] [light_black](#{caller.first})") get_object_https_url(bucket_name, object_name, expires) end diff --git a/lib/fog/google/storage.rb b/lib/fog/google/storage.rb index fd36b192be..43d50da3e5 100644 --- a/lib/fog/google/storage.rb +++ b/lib/fog/google/storage.rb @@ -45,7 +45,7 @@ def https_url(params, expires) end def url(params, expires) - Fog::Logger.warning("Fog::Storage::Google => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::Storage::Google => #url is deprecated, use #https_url instead [light_black](#{caller.first})[/]") https_url(params, expires) end diff --git a/lib/fog/libvirt.rb b/lib/fog/libvirt.rb index 543636a449..c1a13f0651 100644 --- a/lib/fog/libvirt.rb +++ b/lib/fog/libvirt.rb @@ -5,7 +5,7 @@ module Libvirt extend Fog::Provider - service(:compute, 'libvirt/compute') + service(:compute, 'libvirt/compute', 'Compute') end end diff --git a/lib/fog/libvirt/compute.rb b/lib/fog/libvirt/compute.rb index aa9fc7440f..2557f1f82a 100644 --- a/lib/fog/libvirt/compute.rb +++ b/lib/fog/libvirt/compute.rb @@ -33,7 +33,7 @@ def initialize(options={}) class Real - attr_reader :connection + attr_reader :raw attr_reader :uri attr_reader :ip_command @@ -47,7 +47,7 @@ def initialize(options={}) begin if options[:libvirt_username] and options[:libvirt_password] - @connection = ::Libvirt::open_auth(@uri.uri, [::Libvirt::CRED_AUTHNAME, ::Libvirt::CRED_PASSPHRASE]) do |cred| + @raw = ::Libvirt::open_auth(@uri.uri, [::Libvirt::CRED_AUTHNAME, ::Libvirt::CRED_PASSPHRASE]) do |cred| if cred['type'] == ::Libvirt::CRED_AUTHNAME res = options[:libvirt_username] elsif cred["type"] == ::Libvirt::CRED_PASSPHRASE @@ -56,7 +56,7 @@ def initialize(options={}) end end else - @connection = ::Libvirt::open(@uri.uri) + @raw = ::Libvirt::open(@uri.uri) end rescue ::Libvirt::ConnectionError @@ -91,8 +91,8 @@ def enhance_uri(uri) # hack to provide 'requests' def method_missing(method_sym, *arguments, &block) - if @connection.respond_to?(method_sym) - @connection.send(method_sym, *arguments) + if @raw.respond_to?(method_sym) + @raw.send(method_sym, *arguments) else super end diff --git a/lib/fog/libvirt/models/compute/interfaces.rb b/lib/fog/libvirt/models/compute/interfaces.rb index 6d765400c7..74c9f939a6 100644 --- a/lib/fog/libvirt/models/compute/interfaces.rb +++ b/lib/fog/libvirt/models/compute/interfaces.rb @@ -12,12 +12,12 @@ class Interfaces < Fog::Collection def all(filter=nil) data=[] if filter.nil? 
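
Renaming the libvirt handle from `@connection` to `raw` frees `connection` for Fog's own service object, while `method_missing` keeps forwarding unknown calls to the underlying ruby-libvirt connection, so both styles below work. A sketch, assuming a locally reachable qemu socket (the `:libvirt_uri` option name is the usual one, assumed here):

    compute = Fog::Compute.new(:provider => 'Libvirt', :libvirt_uri => 'qemu:///system')

    compute.raw                  # the underlying ::Libvirt::Connect object
    compute.raw.list_domains     # talk to ruby-libvirt directly
    compute.list_domains         # still works via method_missing delegation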
- connection.list_interfaces.each do |ifname| - interface=connection.lookup_interface_by_name(ifname) + connection.raw.list_interfaces.each do |ifname| + interface=connection.raw.lookup_interface_by_name(ifname) data << { :raw => interface } end - connection.list_defined_interfaces.each do |ifname| - interface=connection.lookup_interface_by_name(ifname) + connection.raw.list_defined_interfaces.each do |ifname| + interface=connection.raw.lookup_interface_by_name(ifname) data << { :raw => interface } end @@ -42,14 +42,14 @@ def get(key) #private # Making these private, screws up realod # Retrieve the interface by name def get_by_name(name) - interface=connection.lookup_interface_by_name(name) + interface=connection.raw.lookup_interface_by_name(name) return interface # new(:raw => interface) end # Retrieve the interface by name def get_by_mac(mac) - interface=connection.lookup_interface_by_mac(mac) + interface=connection.raw.lookup_interface_by_mac(mac) return interface # new(:raw => interface) end diff --git a/lib/fog/libvirt/models/compute/pool.rb b/lib/fog/libvirt/models/compute/pool.rb index 5a8726a72c..2331aa4d53 100644 --- a/lib/fog/libvirt/models/compute/pool.rb +++ b/lib/fog/libvirt/models/compute/pool.rb @@ -26,9 +26,9 @@ def save unless xml.nil? pool=nil if self.create_persistent - pool=connection.connection.define_storage_pool_xml(xml) + pool=connection.raw.define_storage_pool_xml(xml) else - pool=connection.connection.create_storage_pool_xml(xml) + pool=connection.raw.create_storage_pool_xml(xml) end self.raw=pool true diff --git a/lib/fog/libvirt/models/compute/pools.rb b/lib/fog/libvirt/models/compute/pools.rb index d33d31443e..feaefb223f 100644 --- a/lib/fog/libvirt/models/compute/pools.rb +++ b/lib/fog/libvirt/models/compute/pools.rb @@ -12,13 +12,13 @@ class Pools < Fog::Collection def all(filter=nil) data=[] if filter.nil? 
- connection.list_storage_pools.each do |poolname| - pool=connection.lookup_storage_pool_by_name(poolname) + connection.raw.list_storage_pools.each do |poolname| + pool=connection.raw.lookup_storage_pool_by_name(poolname) data << { :raw => pool } end - connection.list_defined_storage_pools.each do |poolname| + connection.raw.list_defined_storage_pools.each do |poolname| data << { - :raw => connection.lookup_storage_pool_by_name(poolname) + :raw => connection.raw.lookup_storage_pool_by_name(poolname) } end else @@ -42,13 +42,13 @@ def get(uuid) #private # Making these private, screws up realod # Retrieve the pool by uuid def get_by_uuid(uuid) - pool=connection.lookup_storage_pool_by_uuid(uuid) + pool=connection.raw.lookup_storage_pool_by_uuid(uuid) return pool end # Retrieve the pool by name def get_by_name(name) - pool=connection.lookup_storage_pool_by_name(name) + pool=connection.raw.lookup_storage_pool_by_name(name) return pool # new(:raw => pool) end diff --git a/lib/fog/libvirt/models/compute/server.rb b/lib/fog/libvirt/models/compute/server.rb index 565ed1e9bc..31df6638c4 100644 --- a/lib/fog/libvirt/models/compute/server.rb +++ b/lib/fog/libvirt/models/compute/server.rb @@ -35,7 +35,7 @@ class Server < Fog::Model # The following attributes are only needed when creating a new vm attr_accessor :iso_dir, :iso_file attr_accessor :network_interface_type ,:network_nat_network, :network_bridge_name - attr_accessor :disk_format_type, :disk_allocation,:disk_capacity, :disk_name, :disk_pool_name, :disk_template_name, :disk_path + attr_accessor :volume_format_type, :volume_allocation,:volume_capacity, :volume_name, :volume_pool_name, :volume_template_name, :volume_path attr_accessor :password attr_writer :private_key, :private_key_path, :public_key, :public_key_path, :username @@ -62,13 +62,13 @@ def initialize(attributes={} ) self.iso_file ||=nil unless attributes[:iso_file] self.iso_dir ||="/var/lib/libvirt/images" unless attributes[:iso_dir] - self.disk_format_type ||=nil unless attributes[:disk_format_type] - self.disk_capacity ||=nil unless attributes[:disk_capacity] - self.disk_allocation ||=nil unless attributes[:disk_allocation] + self.volume_format_type ||=nil unless attributes[:volume_format_type] + self.volume_capacity ||=nil unless attributes[:volume_capacity] + self.volume_allocation ||=nil unless attributes[:volume_allocation] - self.disk_name ||=nil unless attributes[:disk_name] - self.disk_pool_name ||=nil unless attributes[:disk_pool_name] - self.disk_template_name ||=nil unless attributes[:disk_template_name] + self.volume_name ||=nil unless attributes[:volume_name] + self.volume_pool_name ||=nil unless attributes[:volume_pool_name] + self.volume_template_name ||=nil unless attributes[:volume_template_name] self.network_interface_type ||="nat" unless attributes[:network_interface_type] self.network_nat_network ||="default" unless attributes[:network_nat_network] @@ -94,9 +94,9 @@ def save if !xml.nil? domain=nil if self.persistent - domain=connection.define_domain_xml(xml) + domain=connection.raw.define_domain_xml(xml) else - domain=connection.create_domain_xml(xml) + domain=connection.raw.create_domain_xml(xml) end self.raw=domain end @@ -109,39 +109,39 @@ def create_or_clone_volume volume_options=Hash.new - unless self.disk_name.nil? - volume_options[:name]=self.disk_name + unless self.volume_name.nil? + volume_options[:name]=self.volume_name else - extension = self.disk_format_type.nil? ? "img" : self.disk_format_type + extension = self.volume_format_type.nil? ? 
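
The server-creation attributes move from `disk_*` to `volume_*` so they line up with the `volumes` collection they end up creating (either a blank volume or a clone of a template volume). A sketch of booting a guest with the renamed attributes; the pool, size and template names are placeholders:

    server = compute.servers.create(
      :name               => 'fog-guest',
      :volume_pool_name   => 'default',
      :volume_capacity    => '10G',
      :volume_format_type => 'qcow2'
    )

    # Or clone an existing template volume instead of creating a blank one:
    # compute.servers.create(:name => 'fog-clone', :volume_template_name => 'debian-6-base.img')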
"img" : self.volume_format_type volume_name = "#{self.name}.#{extension}" volume_options[:name]=volume_name end # Check if a disk template was specified - unless self.disk_template_name.nil? + unless self.volume_template_name.nil? - template_volumes=connection.volumes.all(:name => self.disk_template_name) + template_volumes=connection.volumes.all(:name => self.volume_template_name) - raise Fog::Errors::Error.new("Template #{self.disk_template_name} not found") unless template_volumes.length==1 + raise Fog::Errors::Error.new("Template #{self.volume_template_name} not found") unless template_volumes.length==1 orig_volume=template_volumes.first - self.disk_format_type=orig_volume.format_type unless self.disk_format_type + self.volume_format_type=orig_volume.format_type unless self.volume_format_type volume=orig_volume.clone("#{volume_options[:name]}") # This gets passed to the domain to know the path of the disk - self.disk_path=volume.path + self.volume_path=volume.path else # If no template volume was given, let's create our own volume - volume_options[:format_type]=self.disk_format_type unless self.disk_format_type.nil? - volume_options[:capacity]=self.disk_capacity unless self.disk_capacity.nil? - volume_options[:allocation]=self.disk_allocation unless self.disk_allocation.nil? + volume_options[:format_type]=self.volume_format_type unless self.volume_format_type.nil? + volume_options[:capacity]=self.volume_capacity unless self.volume_capacity.nil? + volume_options[:allocation]=self.volume_allocation unless self.volume_allocation.nil? begin volume=connection.volumes.create(volume_options) - self.disk_path=volume.path - self.disk_format_type=volume.format_type unless self.disk_format_type + self.volume_path=volume.path + self.volume_format_type=volume.format_type unless self.volume_format_type rescue raise Fog::Errors::Error.new("Error creating the volume : #{$!}") end @@ -166,8 +166,8 @@ def xml_from_template :iso_dir => self.iso_dir, :os_type => self.os_type, :arch => self.arch, - :disk_path => self.disk_path, - :disk_format_type => self.disk_format_type, + :volume_path => self.volume_path, + :volume_format_type => self.volume_format_type, :network_interface_type => self.network_interface_type, :network_nat_network => self.network_nat_network, :network_bridge_name => self.network_bridge_name @@ -208,7 +208,6 @@ def destroy(options={ :destroy_volumes => false}) @raw.undefine end - def reboot requires :raw @raw.reboot @@ -280,7 +279,7 @@ def addresses(options={}) # Aug 24 17:34:41 juno arpwatch: new station 10.247.4.137 52:54:00:88:5a:0a eth0.4 # Aug 24 17:37:19 juno arpwatch: changed ethernet address 10.247.4.137 52:54:00:27:33:00 (52:54:00:88:5a:0a) eth0.4 # Check if another ip_command string was provided - ip_command_global=@connection.ip_command.nil? ? 'grep $mac /var/log/arpwatch.log|sed -e "s/new station//"|sed -e "s/changed ethernet address//g" |tail -1 |cut -d ":" -f 4-| cut -d " " -f 3' : @connection.ip_command + ip_command_global=@connection.ip_command.nil? ? 'grep $mac /var/log/arpwatch.log|sed -e "s/new station//"|sed -e "s/changed ethernet address//g" |sed -e "s/reused old ethernet //" |tail -1 |cut -d ":" -f 4-| cut -d " " -f 3' : @connection.ip_command ip_command_local=options[:ip_command].nil? ? 
ip_command_global : options[:ip_command] ip_command="mac=#{mac}; "+ip_command_local diff --git a/lib/fog/libvirt/models/compute/servers.rb b/lib/fog/libvirt/models/compute/servers.rb index 9a424f2f1e..14e3bffa8a 100644 --- a/lib/fog/libvirt/models/compute/servers.rb +++ b/lib/fog/libvirt/models/compute/servers.rb @@ -17,13 +17,13 @@ def all(filter=nil) unless filter.has_key?(:name) || filter.has_key?(:uuid) if include_defined - connection.list_defined_domains.map do |domain| - data << { :raw => connection.lookup_domain_by_name(domain) } + connection.raw.list_defined_domains.map do |domain| + data << { :raw => connection.raw.lookup_domain_by_name(domain) } end end if include_active - connection.list_domains.each do |domain| - data << { :raw => connection.lookup_domain_by_id(domain) } + connection.raw.list_domains.each do |domain| + data << { :raw => connection.raw.lookup_domain_by_id(domain) } end end else @@ -60,14 +60,14 @@ def bootstrap(new_attributes = {}) # Retrieve the server by uuid def get_by_uuid(uuid) - server=connection.lookup_domain_by_uuid(uuid) + server=connection.raw.lookup_domain_by_uuid(uuid) return server # new(:raw => machine) end # Retrieve the server by name def get_by_name(name) - server=connection.lookup_domain_by_name(name) + server=connection.raw.lookup_domain_by_name(name) return server # new(:raw => machine) end diff --git a/lib/fog/libvirt/models/compute/templates/server.xml.erb b/lib/fog/libvirt/models/compute/templates/server.xml.erb index 3f15c37b9e..481c667f48 100644 --- a/lib/fog/libvirt/models/compute/templates/server.xml.erb +++ b/lib/fog/libvirt/models/compute/templates/server.xml.erb @@ -18,8 +18,8 @@ - - + + <% if !iso_file.nil? %> diff --git a/lib/fog/libvirt/models/compute/uri.rb b/lib/fog/libvirt/models/compute/uri.rb index 11a824d560..cfab55b6e7 100644 --- a/lib/fog/libvirt/models/compute/uri.rb +++ b/lib/fog/libvirt/models/compute/uri.rb @@ -126,9 +126,13 @@ def pkipath # http://libvirt.org/remote.html private def value(name) - params=CGI.parse(@parsed_uri.query) - if params.has_key?(name) - return params[name].first + unless @parsed_uri.query.nil? + params=CGI.parse(@parsed_uri.query) + if params.has_key?(name) + return params[name].first + else + return nil + end else return nil end diff --git a/lib/fog/libvirt/models/compute/volume.rb b/lib/fog/libvirt/models/compute/volume.rb index 1df99aaff8..930e8a3eb9 100644 --- a/lib/fog/libvirt/models/compute/volume.rb +++ b/lib/fog/libvirt/models/compute/volume.rb @@ -70,7 +70,7 @@ def save begin volume=nil - pool=connection.lookup_storage_pool_by_name(pool_name) + pool=connection.raw.lookup_storage_pool_by_name(pool_name) volume=pool.create_volume_xml(xml) self.raw=volume true diff --git a/lib/fog/libvirt/models/compute/volumes.rb b/lib/fog/libvirt/models/compute/volumes.rb index 334150ad38..18a2b3ca30 100644 --- a/lib/fog/libvirt/models/compute/volumes.rb +++ b/lib/fog/libvirt/models/compute/volumes.rb @@ -12,8 +12,8 @@ class Volumes < Fog::Collection def all(filter=nil) data=[] if filter.nil? 
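
`value` used to call `CGI.parse(@parsed_uri.query)` unconditionally, which blows up with a NoMethodError when the connection URI has no query string at all; the nil guard makes query-less URIs simply report the parameter as absent. For example, both of these now work (host and key path are placeholders):

    Fog::Compute.new(:provider => 'Libvirt',
                     :libvirt_uri => 'qemu+ssh://root@host.example.com/system?keyfile=/root/.ssh/id_rsa')
    Fog::Compute.new(:provider => 'Libvirt',
                     :libvirt_uri => 'qemu:///system')   # no query string, no crash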
- connection.list_storage_pools.each do |poolname| - pool=connection.lookup_storage_pool_by_name(poolname) + connection.raw.list_storage_pools.each do |poolname| + pool=connection.raw.lookup_storage_pool_by_name(poolname) pool.list_volumes.each do |volumename| data << { :raw => pool.lookup_volume_by_name(volumename) } end @@ -42,8 +42,8 @@ def get(key) # Retrieve the volume by name def get_by_name(name) - connection.list_storage_pools.each do |poolname| - pool=connection.lookup_storage_pool_by_name(poolname) + connection.raw.list_storage_pools.each do |poolname| + pool=connection.raw.lookup_storage_pool_by_name(poolname) volume=pool.lookup_volume_by_name(name) unless volume.nil? return volume @@ -55,8 +55,8 @@ def get_by_name(name) # Retrieve the volume by key def get_by_key(key) - connection.list_storage_pools.each do |poolname| - pool=connection.lookup_storage_pool_by_name(poolname) + connection.raw.list_storage_pools.each do |poolname| + pool=connection.raw.lookup_storage_pool_by_name(poolname) volume=pool.lookup_volume_by_key(key) unless volume.nil? return volume @@ -68,8 +68,8 @@ def get_by_key(key) # Retrieve the volume by key def get_by_path(path) - connection.list_storage_pools.each do |poolname| - pool=connection.lookup_storage_pool_by_name(poolname) + connection.raw.list_storage_pools.each do |poolname| + pool=connection.raw.lookup_storage_pool_by_name(poolname) volume=pool.lookup_volume_by_key(path) unless volume.nil? return volume diff --git a/lib/fog/linode.rb b/lib/fog/linode.rb index f072bf0778..60bdd67da9 100644 --- a/lib/fog/linode.rb +++ b/lib/fog/linode.rb @@ -3,8 +3,8 @@ module Fog module Linode extend Fog::Provider - service(:compute, 'linode/compute') - service(:dns, 'linode/dns') + service(:compute, 'linode/compute', 'Compute') + service(:dns, 'linode/dns', 'DNS') end end diff --git a/lib/fog/linode/models/compute/server.rb b/lib/fog/linode/models/compute/server.rb index d6fae93089..73004a7f6f 100644 --- a/lib/fog/linode/models/compute/server.rb +++ b/lib/fog/linode/models/compute/server.rb @@ -5,14 +5,24 @@ module Compute class Linode class Server < Fog::Compute::Server attr_reader :stack_script + attr_accessor :private_key, :username identity :id attribute :name attribute :status + def initialize(attributes={}) + super + self.username = 'root' + end + def ips Fog::Compute::Linode::Ips.new :server => self, :connection => connection end + def public_ip_address + ips.find{|ip| ip.ip !~ /^192/}.ip + end + def disks Fog::Compute::Linode::Disks.new :server => self, :connection => connection end diff --git a/lib/fog/linode/models/dns/linode/domain_create.rb b/lib/fog/linode/requests/dns/domain_create.rb similarity index 92% rename from lib/fog/linode/models/dns/linode/domain_create.rb rename to lib/fog/linode/requests/dns/domain_create.rb index cf661e4cdd..0164600b77 100644 --- a/lib/fog/linode/models/dns/linode/domain_create.rb +++ b/lib/fog/linode/requests/dns/domain_create.rb @@ -8,7 +8,7 @@ class Real # ==== Parameters # * domain<~String>: The zone's name. 
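
Giving Linode servers a default `username` of `root` and a `public_ip_address` (the first address outside 192.x) is exactly what the shared `Server#ssh`/`#scp` helpers need. A sketch with placeholder API key, server id and key path:

    linode = Fog::Compute.new(:provider => 'Linode', :linode_api_key => 'APIKEY')

    server = linode.servers.get(12345)
    server.private_key = File.read(File.expand_path('~/.ssh/id_rsa'))

    server.public_ip_address   # first address that does not start with 192
    server.ssh('uptime')       # runs as 'root' by default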
Note, if master zone, SOA_email is required and if slave # master_ips is/are required - # * type<~String>: master or slave + # * type<~String>: master or slave # * options<~Hash> # * description<~String> Currently undisplayed # * SOA_email<~String> Required when type=master @@ -16,8 +16,8 @@ class Real # * retry_sec<~Integer> numeric, default: '0' # * expire_sec<~Integer> numeric, default: '0' # * ttl_sec<~String> numeric, default: '0' - # * status<~Integer> 0, 1, or 2 (disabled, active, edit mode), default: 1 - # * master_ips<~String> When type=slave, the zone's master DNS servers list, semicolon separated + # * status<~Integer> 0, 1, or 2 (disabled, active, edit mode), default: 1 + # * master_ips<~String> When type=slave, the zone's master DNS servers list, semicolon separated # # ==== Returns # * response<~Excon::Response>: diff --git a/lib/fog/linode/models/dns/linode/domain_delete.rb b/lib/fog/linode/requests/dns/domain_delete.rb similarity index 100% rename from lib/fog/linode/models/dns/linode/domain_delete.rb rename to lib/fog/linode/requests/dns/domain_delete.rb diff --git a/lib/fog/linode/models/dns/linode/domain_list.rb b/lib/fog/linode/requests/dns/domain_list.rb similarity index 99% rename from lib/fog/linode/models/dns/linode/domain_list.rb rename to lib/fog/linode/requests/dns/domain_list.rb index a067864739..e7f99aed1a 100644 --- a/lib/fog/linode/models/dns/linode/domain_list.rb +++ b/lib/fog/linode/requests/dns/domain_list.rb @@ -28,7 +28,7 @@ def domain_list(domain_id = nil) if domain_id options.merge!(:domainId => domain_id) end - + request( :expects => 200, :method => 'GET', diff --git a/lib/fog/linode/models/dns/linode/domain_resource_create.rb b/lib/fog/linode/requests/dns/domain_resource_create.rb similarity index 84% rename from lib/fog/linode/models/dns/linode/domain_resource_create.rb rename to lib/fog/linode/requests/dns/domain_resource_create.rb index 2e875a4e6e..52df564136 100644 --- a/lib/fog/linode/models/dns/linode/domain_resource_create.rb +++ b/lib/fog/linode/requests/dns/domain_resource_create.rb @@ -7,18 +7,18 @@ class Real # # ==== Parameters # * domain_id<~Integer>: limit the list to the domain ID specified - # * type<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV + # * type<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV # * options<~Hash> - # * name<~String>: The hostname or FQDN. When Type=MX the subdomain to delegate to the + # * name<~String>: The hostname or FQDN. When Type=MX the subdomain to delegate to the # Target MX server # * target<~String> When Type=MX the hostname. When Type=CNAME the target of the alias. - # When Type=TXT the value of the record. When Type=A or AAAA the token - # of '[remote_addr]' will be substituted with the IP address of the request. - # * priority<~Integer>: priority for MX and SRV records, 0-255 - default: 10 + # When Type=TXT the value of the record. When Type=A or AAAA the token + # of '[remote_addr]' will be substituted with the IP address of the request. + # * priority<~Integer>: priority for MX and SRV records, 0-255 - default: 10 # * weight<~Integer>: default: 5 - # * port<~Integer>: default: 80 - # * protocol<~String>: The protocol to append to an SRV record. Ignored on other record - # types. default: udp + # * port<~Integer>: default: 80 + # * protocol<~String>: The protocol to append to an SRV record. Ignored on other record + # types. 
default: udp # * ttl_sec<~Integer>: note, Linode will round the input to set values (300, 3600, 7200, etc) # ==== Returns # * response<~Excon::Response>: diff --git a/lib/fog/linode/models/dns/linode/domain_resource_delete.rb b/lib/fog/linode/requests/dns/domain_resource_delete.rb similarity index 100% rename from lib/fog/linode/models/dns/linode/domain_resource_delete.rb rename to lib/fog/linode/requests/dns/domain_resource_delete.rb diff --git a/lib/fog/linode/models/dns/linode/domain_resource_list.rb b/lib/fog/linode/requests/dns/domain_resource_list.rb similarity index 84% rename from lib/fog/linode/models/dns/linode/domain_resource_list.rb rename to lib/fog/linode/requests/dns/domain_resource_list.rb index 50097e4782..4a8fb1b320 100644 --- a/lib/fog/linode/models/dns/linode/domain_resource_list.rb +++ b/lib/fog/linode/requests/dns/domain_resource_list.rb @@ -13,18 +13,18 @@ class Real # * response<~Excon::Response>: # * body<~Array>: # * DATA<~Array> - # * 'PROTOCOL'<~String>: for SRV records. default is UDP - # * 'TTL_SEC'<~Interger>: + # * 'PROTOCOL'<~String>: for SRV records. default is UDP + # * 'TTL_SEC'<~Interger>: # * 'PRIORITY'<~Interger>: for MX and SRV records - # * 'TYPE'<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV - # * 'TARGET'<~String>: When Type=MX the hostname. When Type=CNAME the target of the alias. - # When Type=TXT the value of the record. When Type=A or AAAA the token + # * 'TYPE'<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV + # * 'TARGET'<~String>: When Type=MX the hostname. When Type=CNAME the target of the alias. + # When Type=TXT the value of the record. When Type=A or AAAA the token # of '[remote_addr]' will be substituted with the IP address of the request. - # * 'WEIGHT'<~Interger>: + # * 'WEIGHT'<~Interger>: # * 'RESOURCEID'<~Interger>: ID of the resource record - # * 'PORT'<~Interger>: + # * 'PORT'<~Interger>: # * 'DOMAINID'<~Interger>: ID of the domain that this record belongs to - # * 'NAME'<~Interger>: The hostname or FQDN. When Type=MX, the subdomain to delegate to + # * 'NAME'<~Interger>: The hostname or FQDN. When Type=MX, the subdomain to delegate to def domain_resource_list(domain_id, resource_id = nil) query = { :api_action => 'domain.resource.list', :domainID => domain_id } if resource_id diff --git a/lib/fog/linode/models/dns/linode/domain_resource_update.rb b/lib/fog/linode/requests/dns/domain_resource_update.rb similarity index 83% rename from lib/fog/linode/models/dns/linode/domain_resource_update.rb rename to lib/fog/linode/requests/dns/domain_resource_update.rb index cda9053dce..236b6582a0 100644 --- a/lib/fog/linode/models/dns/linode/domain_resource_update.rb +++ b/lib/fog/linode/requests/dns/domain_resource_update.rb @@ -9,17 +9,17 @@ class Real # * domain_id<~Integer>: limit the list to the domain ID specified # * resource_id<~Integer>: id of resouce to delete # * options<~Hash> - # * type<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV - # * name<~String>: The hostname or FQDN. When Type=MX the subdomain to delegate to the + # * type<~String>: One of: NS, MX, A, AAAA, CNAME, TXT, or SRV + # * name<~String>: The hostname or FQDN. When Type=MX the subdomain to delegate to the # Target MX server - # * target<~String> When Type=MX the hostname. When Type=CNAME the target of the alias. - # When Type=TXT the value of the record. When Type=A or AAAA the token - # of '[remote_addr]' will be substituted with the IP address of the request. 
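
The relocated DNS requests keep their documented parameters. A request-level sketch of creating a master zone and an A record; the method signatures and response layout are inferred from the parameter docs above (and from `domain_update`'s documented DATA hash), so treat the details as assumptions:

    dns = Fog::DNS.new(:provider => 'Linode', :linode_api_key => 'APIKEY')

    zone      = dns.domain_create('example.com', 'master', :SOA_email => 'admin@example.com')
    domain_id = zone.body['DATA']['DomainID']

    dns.domain_resource_create(domain_id, 'A',
                               :name    => 'www',
                               :target  => '203.0.113.10',
                               :ttl_sec => 3600)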
- # * priority<~Integer>: priority for MX and SRV records, 0-255 - default: 10 + # * target<~String> When Type=MX the hostname. When Type=CNAME the target of the alias. + # When Type=TXT the value of the record. When Type=A or AAAA the token + # of '[remote_addr]' will be substituted with the IP address of the request. + # * priority<~Integer>: priority for MX and SRV records, 0-255 - default: 10 # * weight<~Integer>: default: 5 - # * port<~Integer>: default: 80 - # * protocol<~String>: The protocol to append to an SRV record. Ignored on other record - # types. default: udp + # * port<~Integer>: default: 80 + # * protocol<~String>: The protocol to append to an SRV record. Ignored on other record + # types. default: udp # * ttl_sec<~Integer>: note, Linode will round the input to set values (300, 3600, 7200, etc) # ==== Returns # * response<~Excon::Response>: diff --git a/lib/fog/linode/models/dns/linode/domain_update.rb b/lib/fog/linode/requests/dns/domain_update.rb similarity index 86% rename from lib/fog/linode/models/dns/linode/domain_update.rb rename to lib/fog/linode/requests/dns/domain_update.rb index 93737b5556..c5d0a037a0 100644 --- a/lib/fog/linode/models/dns/linode/domain_update.rb +++ b/lib/fog/linode/requests/dns/domain_update.rb @@ -8,16 +8,16 @@ class Real # ==== Parameters # * domain_id<~Integer>: The ID to identify the zone # * options<~Hash> - # * domain<~String>: The zone's name. - # * type<~String>: master or slave + # * domain<~String>: The zone's name. + # * type<~String>: master or slave # * description<~String> Currently undisplayed # * SOA_email<~String> Required when type=master # * refresh_sec<~Integer> numeric, default: '0' # * retry_sec<~Integer> numeric, default: '0' # * expire_sec<~Integer> numeric, default: '0' # * ttl_sec<~String> numeric, default: '0' - # * status<~Integer> 0, 1, or 2 (disabled, active, edit mode), default: 1 - # * master_ips<~String> When type=slave, the zone's master DNS servers list, semicolon separated + # * status<~Integer> 0, 1, or 2 (disabled, active, edit mode), default: 1 + # * master_ips<~String> When type=slave, the zone's master DNS servers list, semicolon separated # # ==== Returns # * response<~Excon::Response>: @@ -25,7 +25,7 @@ class Real # * DATA<~Hash>: # * 'DomainID'<~Integer>: domain ID def domain_update(domain_id, options = {}) - + request( :expects => 200, :method => 'GET', diff --git a/lib/fog/local.rb b/lib/fog/local.rb index d008fa002c..3bc976acce 100644 --- a/lib/fog/local.rb +++ b/lib/fog/local.rb @@ -5,7 +5,7 @@ module Local extend Fog::Provider - service(:storage, 'local/storage') + service(:storage, 'local/storage', 'Storage') end end diff --git a/lib/fog/new_servers.rb b/lib/fog/new_servers.rb index a3f257f67a..9a7382657d 100644 --- a/lib/fog/new_servers.rb +++ b/lib/fog/new_servers.rb @@ -5,7 +5,7 @@ module NewServers extend Fog::Provider - service(:compute, 'new_servers/compute') + service(:compute, 'new_servers/compute', 'Compute') end end diff --git a/lib/fog/ninefold.rb b/lib/fog/ninefold.rb index b55e8e1728..31ce372709 100644 --- a/lib/fog/ninefold.rb +++ b/lib/fog/ninefold.rb @@ -5,8 +5,8 @@ module Ninefold extend Fog::Provider - service(:compute, 'ninefold/compute') - service(:storage, 'ninefold/storage') + service(:compute, 'ninefold/compute', 'Compute') + service(:storage, 'ninefold/storage', 'Storage') end end diff --git a/lib/fog/ninefold/compute.rb b/lib/fog/ninefold/compute.rb index a3090ec7e1..0cdc8dbedf 100644 --- a/lib/fog/ninefold/compute.rb +++ b/lib/fog/ninefold/compute.rb @@ -21,7 +21,7 @@ 
class Ninefold < Fog::Service model :ip_forwarding_rule collection :ip_forwarding_rules - request_path 'fog/compute/requests/ninefold' + request_path 'fog/ninefold/requests/compute' # General list-only stuff request :list_accounts request :list_events diff --git a/lib/fog/ninefold/models/compute/server.rb b/lib/fog/ninefold/models/compute/server.rb index 80649f1dff..46fbb087d9 100644 --- a/lib/fog/ninefold/models/compute/server.rb +++ b/lib/fog/ninefold/models/compute/server.rb @@ -69,7 +69,8 @@ class Server < Fog::Compute::Server def initialize(attributes={}) merge_attributes({ - :flavor_id => 105 # '1CPU, 384MB, 80GB HDD' + :flavor_id => 105, # '1CPU, 384MB, 80GB HDD' + :image_id => 421 # 'XEN Basic Ubuntu 10.04 Server x64 PV r2.0' }) super end diff --git a/lib/fog/ninefold/models/storage/file.rb b/lib/fog/ninefold/models/storage/file.rb index 8926f16424..79aa2a89a8 100644 --- a/lib/fog/ninefold/models/storage/file.rb +++ b/lib/fog/ninefold/models/storage/file.rb @@ -13,7 +13,7 @@ class File < Fog::Model attribute :objectid, :aliases => :ObjectID def body - attributes[:body] ||= if last_modified + attributes[:body] ||= if objectid collection.get(identity).body else '' @@ -73,13 +73,15 @@ def save(options = {}) options[:headers] ||= {} options[:headers]['Content-Type'] = content_type if content_type options[:body] = body - if objectid - # pre-existing file, do a PUT - data = connection.put_namespace(ns, options) - else - # new file, POST + begin data = connection.post_namespace(ns, options) self.objectid = data.headers['location'].split('/')[-1] + rescue => error + if error.message =~ /The resource you are trying to create already exists./ + data = connection.put_namespace(ns, options) + else + raise error + end end # merge_attributes(data.headers) true diff --git a/lib/fog/ninefold/storage.rb b/lib/fog/ninefold/storage.rb index e51349fccb..b561d03014 100644 --- a/lib/fog/ninefold/storage.rb +++ b/lib/fog/ninefold/storage.rb @@ -107,9 +107,10 @@ def request(params, &block) customheaders = {} params[:headers].each { |key,value| - if key == "x-emc-date" + case key + when 'x-emc-date', 'x-emc-signature' #skip - elsif key =~ /^x-emc-/ + when /^x-emc-/ customheaders[ key.downcase ] = value end } diff --git a/lib/fog/openstack.rb b/lib/fog/openstack.rb new file mode 100644 index 0000000000..afebfd0b8c --- /dev/null +++ b/lib/fog/openstack.rb @@ -0,0 +1,113 @@ +require(File.expand_path(File.join(File.dirname(__FILE__), 'core'))) + +module Fog + module OpenStack + extend Fog::Provider + + module Errors + class ServiceError < Fog::Errors::Error + attr_reader :response_data + + def self.slurp(error) + if error.response.body.empty? + data = nil + message = nil + else + data = MultiJson.decode(error.response.body) + message = data['message'] + end + + new_error = super(error, message) + new_error.instance_variable_set(:@response_data, data) + new_error + end + end + + class InternalServerError < ServiceError; end + class Conflict < ServiceError; end + class NotFound < ServiceError; end + class ServiceUnavailable < ServiceError; end + + class BadRequest < ServiceError + attr_reader :validation_errors + + def self.slurp(error) + new_error = super(error) + unless new_error.response_data.nil? 
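
Ninefold servers now default to an image as well as a flavor, and `File#save` tries a POST first and falls back to PUT when Atmos reports the object already exists, so saving the same key twice becomes an update instead of an error. A sketch; the credential option names and keys are placeholders and assumptions:

    storage = Fog::Storage.new(
      :provider                => 'Ninefold',
      :ninefold_storage_token  => 'TOKEN',
      :ninefold_storage_secret => 'SECRET'
    )

    dir  = storage.directories.create(:key => 'backups')
    file = dir.files.create(:key => 'dump.sql', :body => 'select 1;')   # POST, new object

    file.body = 'select 2;'
    file.save                                                           # PUT, object already exists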
+ new_error.instance_variable_set(:@validation_errors, new_error.response_data['validationErrors']) + end + new_error + end + end + end + + service(:compute, 'openstack/compute', 'Compute') + + # legacy v1.0 style auth + def self.authenticate_v1(options, connection_options = {}) + openstack_auth_url = options[:openstack_auth_url] + uri = URI.parse(openstack_auth_url) + connection = Fog::Connection.new(openstack_auth_url, false, connection_options) + @openstack_api_key = options[:openstack_api_key] + @openstack_username = options[:openstack_username] + response = connection.request({ + :expects => [200, 204], + :headers => { + 'X-Auth-Key' => @openstack_api_key, + 'X-Auth-User' => @openstack_username + }, + :host => uri.host, + :method => 'GET', + :path => (uri.path and not uri.path.empty?) ? uri.path : 'v1.0' + }) + + return { + :token => response.headers['X-Auth-Token'], + :server_management_url => response.headers['X-Server-Management-Url'] + } + + end + + # keystone style auth + def self.authenticate_v2(options, connection_options = {}) + openstack_auth_url = options[:openstack_auth_url] + uri = URI.parse(openstack_auth_url) + connection = Fog::Connection.new(openstack_auth_url, false, connection_options) + @openstack_api_key = options[:openstack_api_key] + @openstack_username = options[:openstack_username] + @openstack_tenant = options[:openstack_tenant] + @compute_service_name = options[:openstack_compute_service_name] + + req_body= { + 'passwordCredentials' => { + 'username' => @openstack_username, + 'password' => @openstack_api_key + } + } + req_body['tenantId'] = @openstack_tenant if @openstack_tenant + + response = connection.request({ + :expects => [200, 204], + :headers => {'Content-Type' => 'application/json'}, + :body => MultiJson.encode(req_body), + :host => uri.host, + :method => 'POST', + :path => (uri.path and not uri.path.empty?) ? uri.path : 'v2.0' + }) + body=MultiJson.decode(response.body) + + if body['auth']['serviceCatalog'] and body['auth']['serviceCatalog'][@compute_service_name] and body['auth']['serviceCatalog'][@compute_service_name][0] then + mgmt_url = body['auth']['serviceCatalog'][@compute_service_name][0]['publicURL'] + token = body['auth']['token']['id'] + return { + :token => token, + :server_management_url => mgmt_url + } + else + raise "Unable to parse service catalog." 
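
Both authentication helpers return the same two-key hash, a token plus the compute endpoint, which is what keeps the compute service below agnostic about the auth style. A sketch of calling the Keystone-style helper directly; the endpoint and credentials are placeholders, and the compute service name must be passed explicitly when bypassing the service class:

    credentials = Fog::OpenStack.authenticate_v2(
      :openstack_auth_url             => 'http://keystone.example.com:5000/v2.0/tokens',
      :openstack_username             => 'demo',
      :openstack_api_key              => 'password',
      :openstack_tenant               => 'demo',
      :openstack_compute_service_name => 'nova'
    )

    credentials[:token]                   # X-Auth-Token for subsequent requests
    credentials[:server_management_url]   # public URL of the compute service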
+ end + + end + + end +end diff --git a/lib/fog/openstack/compute.rb b/lib/fog/openstack/compute.rb new file mode 100644 index 0000000000..2a4b548a3b --- /dev/null +++ b/lib/fog/openstack/compute.rb @@ -0,0 +1,198 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'openstack')) +require 'fog/compute' +require 'fog/openstack' + +module Fog + module Compute + class OpenStack < Fog::Service + + requires :openstack_api_key, :openstack_username, :openstack_auth_url + recognizes :openstack_auth_token, :openstack_management_url, :persistent, :openstack_compute_service_name, :openstack_tenant + + model_path 'fog/openstack/models/compute' + model :flavor + collection :flavors + model :image + collection :images + model :server + collection :servers + model :meta + collection :metadata + + request_path 'fog/openstack/requests/compute' + request :create_server + request :delete_image + request :delete_server + request :get_flavor_details + request :get_image_details + request :get_server_details + request :list_addresses + request :list_private_addresses + request :list_public_addresses + request :list_flavors + request :list_flavors_detail + request :list_images + request :list_images_detail + request :list_servers + request :list_servers_detail + + request :server_action + request :change_password_server + request :reboot_server + request :rebuild_server + request :resize_server + request :confirm_resized_server + request :revert_resized_server + request :create_image + + request :update_server + + request :set_metadata + request :update_metadata + request :list_metadata + + request :get_meta + request :update_meta + request :delete_meta + + class Mock + + def self.data + @data ||= Hash.new do |hash, key| + hash[key] = { + :last_modified => { + :images => {}, + :servers => {} + }, + :images => { + "1" => { + 'id' => "1", + 'name' => "img1", + 'progress' => 100, + 'status' => "ACTIVE", + 'updated' => "", + 'minRam' => 0, + 'minDisk' => 0, + 'metadata' => {}, + 'links' => [] + } + }, + :servers => {} + } + end + end + + def self.reset + @data = nil + end + + def initialize(options={}) + require 'multi_json' + @openstack_username = options[:openstack_username] + end + + def data + self.class.data[@openstack_username] + end + + def reset_data + self.class.data.delete(@openstack_username) + end + + end + + class Real + + def initialize(options={}) + require 'multi_json' + @openstack_api_key = options[:openstack_api_key] + @openstack_username = options[:openstack_username] + @openstack_tenant = options[:openstack_tenant] + @openstack_compute_service_name = options[:openstack_compute_service_name] || 'nova' + @openstack_auth_url = options[:openstack_auth_url] + @openstack_auth_token = options[:openstack_auth_token] + @openstack_management_url = options[:openstack_management_url] + @openstack_must_reauthenticate = false + @connection_options = options[:connection_options] || {} + authenticate + @persistent = options[:persistent] || false + @connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}", @persistent, @connection_options) + end + + def reload + @connection.reset + end + + def request(params) + begin + response = @connection.request(params.merge({ + :headers => { + 'Content-Type' => 'application/json', + 'X-Auth-Token' => @auth_token + }.merge!(params[:headers] || {}), + :host => @host, + :path => "#{@path}/#{params[:path]}", + :query => ('ignore_awful_caching' << Time.now.to_i.to_s) + })) + rescue Excon::Errors::Unauthorized => error + if error.response.body != 'Bad 
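
Wiring it together: the compute service requires the three `openstack_*` credentials, recognizes the tenant and service name, and picks `authenticate_v2` when the auth URL contains `/v2.0/` (falling back to the legacy v1.0 handshake otherwise). A sketch with placeholder endpoints:

    compute = Fog::Compute.new(
      :provider           => :openstack,
      :openstack_auth_url => 'http://keystone.example.com:5000/v2.0/tokens',
      :openstack_username => 'demo',
      :openstack_api_key  => 'password',
      :openstack_tenant   => 'demo'
    )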
username or password' # token expiration + @openstack_must_reauthenticate = true + authenticate + retry + else # bad credentials + raise error + end + rescue Excon::Errors::HTTPStatusError => error + raise case error + when Excon::Errors::NotFound + Fog::Compute::OpenStack::NotFound.slurp(error) + else + error + end + end + unless response.body.empty? + response.body = MultiJson.decode(response.body) + end + response + end + + private + + def authenticate + if @openstack_must_reauthenticate || @openstack_auth_token.nil? + options = { + :openstack_api_key => @openstack_api_key, + :openstack_username => @openstack_username, + :openstack_auth_url => @openstack_auth_url, + :openstack_tenant => @openstack_tenant, + :openstack_compute_service_name => @openstack_compute_service_name + } + if @openstack_auth_url =~ /\/v2.0\// + credentials = Fog::OpenStack.authenticate_v2(options, @connection_options) + else + credentials = Fog::OpenStack.authenticate_v1(options, @connection_options) + end + @auth_token = credentials[:token] + url = credentials[:server_management_url] + uri = URI.parse(url) + else + @auth_token = @openstack_auth_token + uri = URI.parse(@openstack_management_url) + end + @host = uri.host + @path = uri.path + @path.sub!(/\/$/, '') + unless @path.match(/1\.1/) + raise Fog::Compute::OpenStack::ServiceUnavailable.new( + "OpenStack binding only supports version 1.1") + end + # Add tenant + @path += @openstack_tenant if @openstack_tenant + @port = uri.port + @scheme = uri.scheme + end + + end + end + end +end diff --git a/lib/fog/openstack/models/compute/flavor.rb b/lib/fog/openstack/models/compute/flavor.rb new file mode 100644 index 0000000000..2e998a61b0 --- /dev/null +++ b/lib/fog/openstack/models/compute/flavor.rb @@ -0,0 +1,20 @@ +require 'fog/core/model' + +module Fog + module Compute + class OpenStack + + class Flavor < Fog::Model + + identity :id + + attribute :disk + attribute :name + attribute :ram + attribute :links + + end + + end + end +end diff --git a/lib/fog/openstack/models/compute/flavors.rb b/lib/fog/openstack/models/compute/flavors.rb new file mode 100644 index 0000000000..6f9d2a7683 --- /dev/null +++ b/lib/fog/openstack/models/compute/flavors.rb @@ -0,0 +1,28 @@ +require 'fog/core/collection' +require 'fog/openstack/models/compute/flavor' + +module Fog + module Compute + class OpenStack + + class Flavors < Fog::Collection + + model Fog::Compute::OpenStack::Flavor + + def all + data = connection.list_flavors_detail.body['flavors'] + load(data) + end + + def get(flavor_id) + data = connection.get_flavor_details(flavor_id).body['flavor'] + new(data) + rescue Fog::Compute::OpenStack::NotFound + nil + end + + end + + end + end +end diff --git a/lib/fog/openstack/models/compute/image.rb b/lib/fog/openstack/models/compute/image.rb new file mode 100644 index 0000000000..0b5646a990 --- /dev/null +++ b/lib/fog/openstack/models/compute/image.rb @@ -0,0 +1,57 @@ +require 'fog/core/model' +require 'fog/openstack/models/compute/metadata' + +module Fog + module Compute + class OpenStack + + class Image < Fog::Model + + identity :id + + attribute :name + attribute :created_at, :aliases => 'created' + attribute :updated_at, :aliases => 'updated' + attribute :progress + attribute :status + attribute :minDisk + attribute :minRam + attribute :server, :aliases => 'server' + attribute :metadata + attribute :links + + def initialize(attributes) + @connection = attributes[:connection] + super + end + + def metadata + @metadata ||= begin + Fog::Compute::OpenStack::Metadata.new({ + 
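
The collections follow the usual Fog pattern: `all` hits the detail listing and `get` rescues the service's NotFound into nil rather than raising. A sketch:

    compute.flavors.all       # GET flavors/detail behind the scenes
    compute.flavors.get(1)    # GET flavors/1; nil if the id is unknown
    compute.images.get('1')   # the Mock seeds image "1" ("img1") for tests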
:connection => connection, + :parent => self + }) + end + end + + def metadata=(new_metadata={}) + metas = [] + new_metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} } + metadata.load(metas) + end + + def destroy + requires :id + connection.delete_image(id) + true + end + + def ready? + status == 'ACTIVE' + end + + end + + end + end +end diff --git a/lib/fog/openstack/models/compute/images.rb b/lib/fog/openstack/models/compute/images.rb new file mode 100644 index 0000000000..c77a206db2 --- /dev/null +++ b/lib/fog/openstack/models/compute/images.rb @@ -0,0 +1,33 @@ +require 'fog/core/collection' +require 'fog/openstack/models/compute/image' + +module Fog + module Compute + class OpenStack + + class Images < Fog::Collection + + model Fog::Compute::OpenStack::Image + + attribute :server + + def all + data = connection.list_images_detail.body['images'] + load(data) + if server + self.replace(self.select {|image| image.server_id == server.id}) + end + end + + def get(image_id) + data = connection.get_image_details(image_id).body['image'] + new(data) + rescue Fog::Compute::OpenStack::NotFound + nil + end + + end + + end + end +end diff --git a/lib/fog/openstack/models/compute/meta.rb b/lib/fog/openstack/models/compute/meta.rb new file mode 100644 index 0000000000..06d6a26928 --- /dev/null +++ b/lib/fog/openstack/models/compute/meta.rb @@ -0,0 +1,29 @@ +require 'fog/core/model' +require 'fog/openstack/models/meta_parent' + +module Fog + module Compute + class OpenStack + class Meta < Fog::Model + + include Fog::Compute::OpenStack::MetaParent + + identity :key + attribute :value + + def destroy + requires :identity + connection.delete_meta(collection_name, @parent.id, key) + true + end + + def save + requires :identity, :value + connection.update_meta(collection_name, @parent.id, key, value) + true + end + + end + end + end +end diff --git a/lib/fog/openstack/models/compute/metadata.rb b/lib/fog/openstack/models/compute/metadata.rb new file mode 100644 index 0000000000..7da2a33883 --- /dev/null +++ b/lib/fog/openstack/models/compute/metadata.rb @@ -0,0 +1,69 @@ +require 'fog/core/collection' +require 'fog/openstack/models/meta_parent' +require 'fog/openstack/models/compute/meta' +require 'fog/openstack/models/compute/image' +require 'fog/openstack/models/compute/server' + +module Fog + module Compute + class OpenStack + + class Metadata < Fog::Collection + + model Fog::Compute::OpenStack::Meta + + include Fog::Compute::OpenStack::MetaParent + + def all + requires :parent + metadata = connection.list_metadata(collection_name, @parent.id).body['metadata'] + metas = [] + metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} } + load(metas) + end + + def get(key) + requires :parent + data = connection.get_meta(collection_name, @parent.id, key).body["meta"] + metas = [] + data.each_pair {|k,v| metas << {"key" => k, "value" => v} } + new(metas[0]) + rescue Fog::Compute::OpenStack::NotFound + nil + end + + def update(data=nil) + requires :parent + connection.update_metadata(collection_name, @parent.id, meta_hash(data)) + end + + def set(data=nil) + requires :parent + connection.set_metadata(collection_name, @parent.id, meta_hash(data)) + end + + def new(attributes = {}) + requires :parent + super({ :parent => @parent }.merge!(attributes)) + end + + private + def meta_hash(data=nil) + if data.nil? 
+ data={} + self.each do |meta| + if meta.is_a?(Fog::Compute::OpenStack::Meta) then + data.store(meta.key, meta.value) + else + data.store(meta["key"], meta["value"]) + end + end + end + data + end + + end + + end + end +end diff --git a/lib/fog/openstack/models/compute/server.rb b/lib/fog/openstack/models/compute/server.rb new file mode 100644 index 0000000000..e42ad402cf --- /dev/null +++ b/lib/fog/openstack/models/compute/server.rb @@ -0,0 +1,201 @@ +require 'fog/compute/models/server' +require 'fog/openstack/models/compute/metadata' + +module Fog + module Compute + class OpenStack + + class Server < Fog::Compute::Server + + identity :id + + attribute :addresses + attribute :flavor + attribute :host_id, :aliases => 'hostId' + attribute :image + attribute :metadata + attribute :links + attribute :name + attribute :personality + attribute :progress + attribute :accessIPv4 + attribute :accessIPv6 + attribute :availability_zone + attribute :user_data_encoded + attribute :state, :aliases => 'status' + + attr_reader :password + attr_writer :private_key, :private_key_path, :public_key, :public_key_path, :username, :image_ref, :flavor_ref + + def initialize(attributes={}) + @connection = attributes[:connection] + attributes[:metadata] = {} + super + end + + def metadata + @metadata ||= begin + Fog::Compute::OpenStack::Metadata.new({ + :connection => connection, + :parent => self + }) + end + end + + def metadata=(new_metadata={}) + metas = [] + new_metadata.each_pair {|k,v| metas << {"key" => k, "value" => v} } + metadata.load(metas) + end + + def user_data=(ascii_userdata) + self.user_data_encoded = [ascii_userdata].pack('m') + end + + def destroy + requires :id + connection.delete_server(id) + true + end + + def images + requires :id + connection.images(:server => self) + end + + def private_ip_address + nil + end + + def private_key_path + @private_key_path ||= Fog.credentials[:private_key_path] + @private_key_path &&= File.expand_path(@private_key_path) + end + + def private_key + @private_key ||= private_key_path && File.read(private_key_path) + end + + def public_ip_address + addresses['public'].first + end + + def public_key_path + @public_key_path ||= Fog.credentials[:public_key_path] + @public_key_path &&= File.expand_path(@public_key_path) + end + + def public_key + @public_key ||= public_key_path && File.read(public_key_path) + end + + def image_ref + @image_ref + end + + def image_ref=(new_image_ref) + @image_ref = new_image_ref + end + + def flavor_ref + @flavor_ref + end + + def flavor_ref=(new_flavor_ref) + @flavor_ref = new_flavor_ref + end + + def ready? 
+ self.state == 'ACTIVE' + end + + def change_password(admin_password) + requires :id + connection.change_password_server(id, admin_password) + true + end + + def rebuild(image_ref, name, admin_pass=nil, metadata=nil, personality=nil) + requires :id + connection.rebuild_server(id, image_ref, name, admin_pass, metadata, personality) + true + end + + def resize(flavor_ref) + requires :id + connection.resize_server(id, flavor_ref) + true + end + + def revert_resize + requires :id + connection.revert_resize_server(id) + true + end + + def confirm_resize + requires :id + connection.confirm_resize_server(id) + true + end + + def reboot(type = 'SOFT') + requires :id + connection.reboot_server(id, type) + true + end + + def create_image(name, metadata={}) + requires :id + connection.create_image(id, name, metadata) + end + + def save + raise Fog::Errors::Error.new('Resaving an existing object may create a duplicate') if identity + requires :flavor_ref, :image_ref, :name + meta_hash = {} + metadata.each { |meta| meta_hash.store(meta.key, meta.value) } + options = { + 'metadata' => meta_hash, + 'personality' => personality, + 'accessIPv4' => accessIPv4, + 'accessIPv6' => accessIPv6, + 'availability_zone' => availability_zone, + 'user_data' => user_data_encoded + } + options = options.reject {|key, value| value.nil?} + data = connection.create_server(name, image_ref, flavor_ref, options) + merge_attributes(data.body['server']) + true + end + + def setup(credentials = {}) + requires :public_ip_address, :identity, :public_key, :username + Fog::SSH.new(public_ip_address, username, credentials).run([ + %{mkdir .ssh}, + %{echo "#{public_key}" >> ~/.ssh/authorized_keys}, + %{passwd -l #{username}}, + %{echo "#{MultiJson.encode(attributes)}" >> ~/attributes.json}, + %{echo "#{MultiJson.encode(metadata)}" >> ~/metadata.json} + ]) + rescue Errno::ECONNREFUSED + sleep(1) + retry + end + + def username + @username ||= 'root' + end + + private + + def adminPass=(new_admin_pass) + @password = new_admin_pass + end + + end + + end + end + +end diff --git a/lib/fog/openstack/models/compute/servers.rb b/lib/fog/openstack/models/compute/servers.rb new file mode 100644 index 0000000000..de36198d43 --- /dev/null +++ b/lib/fog/openstack/models/compute/servers.rb @@ -0,0 +1,36 @@ +require 'fog/core/collection' +require 'fog/openstack/models/compute/server' + +module Fog + module Compute + class OpenStack + + class Servers < Fog::Collection + + model Fog::Compute::OpenStack::Server + + def all + data = connection.list_servers_detail.body['servers'] + load(data) + end + + def bootstrap(new_attributes = {}) + server = create(new_attributes) + server.wait_for { ready? } + server.setup(:password => server.password) + server + end + + def get(server_id) + if server = connection.get_server_details(server_id).body['server'] + new(server) + end + rescue Fog::Compute::OpenStack::NotFound + nil + end + + end + + end + end +end diff --git a/lib/fog/openstack/models/meta_parent.rb b/lib/fog/openstack/models/meta_parent.rb new file mode 100644 index 0000000000..12f591e86f --- /dev/null +++ b/lib/fog/openstack/models/meta_parent.rb @@ -0,0 +1,33 @@ +module Fog + module Compute + class OpenStack + module MetaParent + + def parent + @parent + end + + def parent=(new_parent) + @parent = new_parent + end + + def collection_name + if @parent.class == Fog::Compute::OpenStack::Image + return "images" + elsif @parent.class == Fog::Compute::OpenStack::Server + return "servers" + else + raise "Metadata is not supported for this model type." 
+ end + end + + def metas_to_hash(metas) + hash = {} + metas.each { |meta| hash.store(meta.key, meta.value) } + hash + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/change_password_server.rb b/lib/fog/openstack/requests/compute/change_password_server.rb new file mode 100644 index 0000000000..c28cdaa286 --- /dev/null +++ b/lib/fog/openstack/requests/compute/change_password_server.rb @@ -0,0 +1,24 @@ +module Fog + module Compute + class OpenStack + class Real + + def change_password_server(server_id, admin_password) + body = { 'changePassword' => { 'adminPass' => admin_password }} + server_action(server_id, body) + end + + end + + class Mock + + def change_password_server(server_id, admin_password) + response = Excon::Response.new + response.status = 202 + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/confirm_resized_server.rb b/lib/fog/openstack/requests/compute/confirm_resized_server.rb new file mode 100644 index 0000000000..d931d92dc5 --- /dev/null +++ b/lib/fog/openstack/requests/compute/confirm_resized_server.rb @@ -0,0 +1,24 @@ +module Fog + module Compute + class OpenStack + class Real + + def confirm_resized_server(server_id) + body = { 'confirmResize' => nil } + server_action(server_id, body, 204) + end + + end + + class Mock + + def confirm_resized_server(server_id) + response = Excon::Response.new + response.status = 204 + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/create_image.rb b/lib/fog/openstack/requests/compute/create_image.rb new file mode 100644 index 0000000000..7efc4238ae --- /dev/null +++ b/lib/fog/openstack/requests/compute/create_image.rb @@ -0,0 +1,49 @@ +module Fog + module Compute + class OpenStack + class Real + + def create_image(server_id, name, metadata={}) + body = { 'createImage' => { + 'name' => name, + 'metadata' => metadata + }} + data = server_action(server_id, body) + image_id = data.headers["Location"].scan(/.*\/(.*)/).flatten + get_image_details(image_id) + end + + end + + class Mock + + def create_image(server_id, name, metadata={}) + response = Excon::Response.new + response.status = 202 + + img_id=Fog::Mock.random_numbers(6).to_s + + data = { + 'id' => img_id, + 'server' => {"id"=>"3", "links"=>[{"href"=>"http://nova1:8774/admin/servers/#{server_id}", "rel"=>"bookmark"}]}, + 'links' => [{"href"=>"http://nova1:8774/v1.1/admin/images/#{img_id}", "rel"=>"self"}, {"href"=>"http://nova1:8774/admin/images/#{img_id}", "rel"=>"bookmark"}], + 'metadata' => metadata || {}, + 'name' => name || "server_#{rand(999)}", + 'progress' => 0, + 'status' => 'SAVING', + 'minDisk' => 0, + 'minRam' => 0, + 'updated' => "", + 'created' => "" + } + self.data[:last_modified][:images][data['id']] = Time.now + self.data[:images][data['id']] = data + response.body = { 'image' => data } + response + + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/create_server.rb b/lib/fog/openstack/requests/compute/create_server.rb new file mode 100644 index 0000000000..c2e20b539f --- /dev/null +++ b/lib/fog/openstack/requests/compute/create_server.rb @@ -0,0 +1,70 @@ +module Fog + module Compute + class OpenStack + class Real + + def create_server(name, image_ref, flavor_ref, options = {}) + data = { + 'server' => { + 'flavorRef' => flavor_ref, + 'imageRef' => image_ref, + 'name' => name + } + } + + vanilla_options = ['metadata', 'accessIPv4', 'accessIPv6', + 'availability_zone', 'user_data'] + vanilla_options.select{|o| options[o]}.each do |key| 
+ data['server'][key] = options[key] + end + + if options['personality'] + data['server']['personality'] = [] + for file in options['personality'] + data['server']['personality'] << { + 'contents' => Base64.encode64(file['contents']), + 'path' => file['path'] + } + end + end + + request( + :body => MultiJson.encode(data), + :expects => [200, 202], + :method => 'POST', + :path => 'servers.json' + ) + end + + end + + class Mock + + def create_server(name, image_ref, flavor_ref, options = {}) + response = Excon::Response.new + response.status = 202 + + data = { + 'addresses' => {}, + 'flavor' => {"id"=>"1", "links"=>[{"href"=>"http://nova1:8774/admin/flavors/1", "rel"=>"bookmark"}]}, + 'id' => Fog::Mock.random_numbers(6).to_s, + 'image' => {"id"=>"3", "links"=>[{"href"=>"http://nova1:8774/admin/images/3", "rel"=>"bookmark"}]}, + 'links' => [{"href"=>"http://nova1:8774/v1.1/admin/servers/5", "rel"=>"self"}, {"href"=>"http://nova1:8774/admin/servers/5", "rel"=>"bookmark"}], + 'hostId' => "123456789ABCDEF01234567890ABCDEF", + 'metadata' => options['metadata'] || {}, + 'name' => options['name'] || "server_#{rand(999)}", + 'accessIPv4' => options['accessIPv4'] || "", + 'accessIPv6' => options['accessIPv6'] || "", + 'progress' => 0, + 'status' => 'BUILD' + } + self.data[:last_modified][:servers][data['id']] = Time.now + self.data[:servers][data['id']] = data + response.body = { 'server' => data.merge({'adminPass' => 'password'}) } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/delete_image.rb b/lib/fog/openstack/requests/compute/delete_image.rb new file mode 100644 index 0000000000..bbae7f2197 --- /dev/null +++ b/lib/fog/openstack/requests/compute/delete_image.rb @@ -0,0 +1,40 @@ +module Fog + module Compute + class OpenStack + class Real + + def delete_image(image_id) + request( + :expects => 204, + :method => 'DELETE', + :path => "images/#{image_id}" + ) + end + + end + + class Mock + + def delete_image(image_id) + response = Excon::Response.new + if image = list_images_detail.body['images'].detect {|_| _['id'] == image_id} + if image['status'] == 'SAVING' + response.status = 409 + raise(Excon::Errors.status_error({:expects => 202}, response)) + else + self.data[:last_modified][:images].delete(image_id) + self.data[:images].delete(image_id) + response.status = 202 + end + response + else + response.status = 400 + raise(Excon::Errors.status_error({:expects => 202}, response)) + end + + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/delete_meta.rb b/lib/fog/openstack/requests/compute/delete_meta.rb new file mode 100644 index 0000000000..f69c4db7eb --- /dev/null +++ b/lib/fog/openstack/requests/compute/delete_meta.rb @@ -0,0 +1,28 @@ +module Fog + module Compute + class OpenStack + class Real + + def delete_meta(collection_name, parent_id, key) + request( + :expects => 204, + :method => 'DELETE', + :path => "#{collection_name}/#{parent_id}/metadata/#{key}" + ) + end + + end + + class Mock + + def delete_meta(collection_name, parent_id, key) + response = Excon::Response.new + response.status = 204 + response + end + + end + + end + end +end diff --git a/lib/fog/openstack/requests/compute/delete_server.rb b/lib/fog/openstack/requests/compute/delete_server.rb new file mode 100644 index 0000000000..b6424749a8 --- /dev/null +++ b/lib/fog/openstack/requests/compute/delete_server.rb @@ -0,0 +1,38 @@ +module Fog + module Compute + class OpenStack + class Real + + def delete_server(server_id) + request( + :expects => 204, + :method => 
'DELETE', + :path => "servers/#{server_id}" + ) + end + + end + + class Mock + + def delete_server(server_id) + response = Excon::Response.new + if server = list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + if server['status'] == 'BUILD' + response.status = 409 + raise(Excon::Errors.status_error({:expects => 204}, response)) + else + self.data[:last_modified][:servers].delete(server_id) + self.data[:servers].delete(server_id) + response.status = 204 + end + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/get_flavor_details.rb b/lib/fog/openstack/requests/compute/get_flavor_details.rb new file mode 100644 index 0000000000..c645572e99 --- /dev/null +++ b/lib/fog/openstack/requests/compute/get_flavor_details.rb @@ -0,0 +1,43 @@ +module Fog + module Compute + class OpenStack + class Real + + def get_flavor_details(flavor_ref) + request( + :expects => [200, 203], + :method => 'GET', + :path => "flavors/#{flavor_ref}.json" + ) + end + + end + + class Mock + + def get_flavor_details(flavor_ref) + response = Excon::Response.new + flavor = { + '1' => { 'id' => '1', 'name' => '256 server', 'ram' => 256, 'disk' => 10, 'links' => [] }, + '2' => { 'id' => '2', 'name' => '512 server', 'ram' => 512, 'disk' => 20, 'links' => [] }, + '3' => { 'id' => '3', 'name' => '1GB server', 'ram' => 1024, 'disk' => 40, 'links' => [] }, + '4' => { 'id' => '4', 'name' => '2GB server', 'ram' => 2048, 'disk' => 80, 'links' => [] }, + '5' => { 'id' => '5', 'name' => '4GB server', 'ram' => 4096, 'disk' => 160, 'links' => [] }, + '6' => { 'id' => '6', 'name' => '8GB server', 'ram' => 8192, 'disk' => 320, 'links' => [] }, + '7' => { 'id' => '7', 'name' => '15.5GB server', 'ram' => 15872, 'disk' => 620, 'links' => [] } + }[flavor_ref] + if flavor + response.status = 200 + response.body = { + 'flavor' => flavor + } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/get_image_details.rb b/lib/fog/openstack/requests/compute/get_image_details.rb new file mode 100644 index 0000000000..630a070d72 --- /dev/null +++ b/lib/fog/openstack/requests/compute/get_image_details.rb @@ -0,0 +1,33 @@ +module Fog + module Compute + class OpenStack + class Real + + def get_image_details(image_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "images/#{image_id}.json" + ) + end + + end + + class Mock + + def get_image_details(image_id) + response = Excon::Response.new + if image = list_images_detail.body['images'].detect {|_| _['id'] == image_id} + response.status = [200, 203][rand(1)] + response.body = { 'image' => image } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + + end + end +end diff --git a/lib/fog/openstack/requests/compute/get_meta.rb b/lib/fog/openstack/requests/compute/get_meta.rb new file mode 100644 index 0000000000..5d8332b2d0 --- /dev/null +++ b/lib/fog/openstack/requests/compute/get_meta.rb @@ -0,0 +1,29 @@ +module Fog + module Compute + class OpenStack + class Real + + def get_meta(collection_name, parent_id, key) + request( + :expects => [200, 203], + :method => 'GET', + :path => "#{collection_name}/#{parent_id}/metadata/#{key}" + ) + end + + end + + class Mock + + def get_meta(collection_name, parent_id, key) + response = Excon::Response.new + response.status = 200 + response.body = { 'meta' => {} } + response + end + + end + + end + end +end diff --git 
a/lib/fog/openstack/requests/compute/get_server_details.rb b/lib/fog/openstack/requests/compute/get_server_details.rb new file mode 100644 index 0000000000..13bd036525 --- /dev/null +++ b/lib/fog/openstack/requests/compute/get_server_details.rb @@ -0,0 +1,32 @@ +module Fog + module Compute + class OpenStack + class Real + + def get_server_details(server_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "servers/#{server_id}.json" + ) + end + + end + + class Mock + + def get_server_details(server_id) + response = Excon::Response.new + if server = list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + response.status = [200, 203][rand(1)] + response.body = { 'server' => server } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_addresses.rb b/lib/fog/openstack/requests/compute/list_addresses.rb new file mode 100644 index 0000000000..7fe046c948 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_addresses.rb @@ -0,0 +1,32 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_addresses(server_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "servers/#{server_id}/ips.json" + ) + end + + end + + class Mock + + def list_addresses(server_id) + response = Excon::Response.new + if server = list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + response.status = [200, 203][rand(1)] + response.body = { 'addresses' => server['addresses'] } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_flavors.rb b/lib/fog/openstack/requests/compute/list_flavors.rb new file mode 100644 index 0000000000..4e491cc8a2 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_flavors.rb @@ -0,0 +1,38 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_flavors + request( + :expects => [200, 203], + :method => 'GET', + :path => 'flavors.json' + ) + end + + end + + class Mock + + def list_flavors + response = Excon::Response.new + response.status = 200 + response.body = { + 'flavors' => [ + { 'name' => '256 server', 'id' => '1', 'links' => [] }, + { 'name' => '512 server', 'id' => '2', 'links' => [] }, + { 'name' => '1GB server', 'id' => '3', 'links' => [] }, + { 'name' => '2GB server', 'id' => '4', 'links' => [] }, + { 'name' => '4GB server', 'id' => '5', 'links' => [] }, + { 'name' => '8GB server', 'id' => '6', 'links' => [] }, + { 'name' => '15.5GB server', 'id' => '7', 'links' => [] } + ] + } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_flavors_detail.rb b/lib/fog/openstack/requests/compute/list_flavors_detail.rb new file mode 100644 index 0000000000..6ba95bc3a4 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_flavors_detail.rb @@ -0,0 +1,38 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_flavors_detail + request( + :expects => [200, 203], + :method => 'GET', + :path => 'flavors/detail.json' + ) + end + + end + + class Mock + + def list_flavors_detail + response = Excon::Response.new + response.status = 200 + response.body = { + 'flavors' => [ + { 'name' => '256 server', 'id' => '1', 'ram' => 256, 'disk' => 10, 'links' => [] }, + { 'name' => '512 server', 'id' => '2', 'ram' => 512, 'disk' => 20, 'links' => [] }, + { 'name' => '1GB server', 'id' => '3', 'ram' => 1024, 'disk' => 40, 'links' => [] }, + { 
'name' => '2GB server', 'id' => '4', 'ram' => 2048, 'disk' => 80, 'links' => [] }, + { 'name' => '4GB server', 'id' => '5', 'ram' => 4096, 'disk' => 160, 'links' => [] }, + { 'name' => '8GB server', 'id' => '6', 'ram' => 8192, 'disk' => 320, 'links' => [] }, + { 'name' => '15.5GB server', 'id' => '7', 'ram' => 15872, 'disk' => 620, 'links' => [] } + ] + } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_images.rb b/lib/fog/openstack/requests/compute/list_images.rb new file mode 100644 index 0000000000..8ad0405c98 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_images.rb @@ -0,0 +1,33 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_images + request( + :expects => [200, 203], + :method => 'GET', + :path => 'images.json' + ) + end + + end + + class Mock + + def list_images + response = Excon::Response.new + data = list_images_detail.body['images'] + images = [] + for image in data + images << image.reject { |key, value| !['id', 'name', 'links'].include?(key) } + end + response.status = [200, 203][rand(1)] + response.body = { 'images' => images } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_images_detail.rb b/lib/fog/openstack/requests/compute/list_images_detail.rb new file mode 100644 index 0000000000..86c840233f --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_images_detail.rb @@ -0,0 +1,39 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_images_detail + request( + :expects => [200, 203], + :method => 'GET', + :path => 'images/detail.json' + ) + end + + end + + class Mock + + def list_images_detail + response = Excon::Response.new + + images = self.data[:images].values + for image in images + case image['status'] + when 'SAVING' + if Time.now - self.data[:last_modified][:images][image['id']] >= Fog::Mock.delay + image['status'] = 'ACTIVE' + end + end + end + + response.status = [200, 203][rand(1)] + response.body = { 'images' => images.map {|image| image.reject {|key, value| !['id', 'name', 'links', 'minRam', 'minDisk', 'metadata', 'status', 'updated'].include?(key)}} } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_metadata.rb b/lib/fog/openstack/requests/compute/list_metadata.rb new file mode 100644 index 0000000000..7ba1346f75 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_metadata.rb @@ -0,0 +1,28 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_metadata(collection_name, parent_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "/#{collection_name}/#{parent_id}/metadata.json" + ) + end + + end + + class Mock + + def list_metadata(collection_name, parent_id) + response = Excon::Response.new + response.status = 200 + response.body = {} + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_private_addresses.rb b/lib/fog/openstack/requests/compute/list_private_addresses.rb new file mode 100644 index 0000000000..904d467611 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_private_addresses.rb @@ -0,0 +1,32 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_private_addresses(server_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "servers/#{server_id}/ips/private.json" + ) + end + + end + + class Mock + + def list_private_addresses(server_id) + response = Excon::Response.new + if server = 
list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + response.status = [200, 203][rand(1)] + response.body = { 'private' => server['addresses']['private'] } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_public_addresses.rb b/lib/fog/openstack/requests/compute/list_public_addresses.rb new file mode 100644 index 0000000000..cbb3c32822 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_public_addresses.rb @@ -0,0 +1,32 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_public_addresses(server_id) + request( + :expects => [200, 203], + :method => 'GET', + :path => "servers/#{server_id}/ips/public.json" + ) + end + + end + + class Mock + + def list_public_addresses(server_id) + response = Excon::Response.new + if server = list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + response.status = [200, 203][rand(1)] + response.body = { 'public' => server['addresses']['public'] } + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_servers.rb b/lib/fog/openstack/requests/compute/list_servers.rb new file mode 100644 index 0000000000..a83b892f67 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_servers.rb @@ -0,0 +1,33 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_servers + request( + :expects => [200, 203], + :method => 'GET', + :path => 'servers.json' + ) + end + + end + + class Mock + + def list_servers + response = Excon::Response.new + data = list_servers_detail.body['servers'] + servers = [] + for server in data + servers << server.reject { |key, value| !['id', 'name', 'links'].include?(key) } + end + response.status = [200, 203][rand(1)] + response.body = { 'servers' => servers } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/list_servers_detail.rb b/lib/fog/openstack/requests/compute/list_servers_detail.rb new file mode 100644 index 0000000000..a6cfe32348 --- /dev/null +++ b/lib/fog/openstack/requests/compute/list_servers_detail.rb @@ -0,0 +1,39 @@ +module Fog + module Compute + class OpenStack + class Real + + def list_servers_detail + request( + :expects => [200, 203], + :method => 'GET', + :path => 'servers/detail.json' + ) + end + + end + + class Mock + + def list_servers_detail + response = Excon::Response.new + + servers = self.data[:servers].values + for server in servers + case server['status'] + when 'BUILD' + if Time.now - self.data[:last_modified][:servers][server['id']] > Fog::Mock.delay * 2 + server['status'] = 'ACTIVE' + end + end + end + + response.status = [200, 203][rand(1)] + response.body = { 'servers' => servers } + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/reboot_server.rb b/lib/fog/openstack/requests/compute/reboot_server.rb new file mode 100644 index 0000000000..0cfd72d803 --- /dev/null +++ b/lib/fog/openstack/requests/compute/reboot_server.rb @@ -0,0 +1,24 @@ +module Fog + module Compute + class OpenStack + class Real + + def reboot_server(server_id, type = 'SOFT') + body = { 'reboot' => { 'type' => type }} + server_action(server_id, body) + end + + end + + class Mock + + def reboot_server(server_id, type = 'SOFT') + response = Excon::Response.new + response.status = 202 + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/rebuild_server.rb 
b/lib/fog/openstack/requests/compute/rebuild_server.rb new file mode 100644 index 0000000000..c4f23e6b1d --- /dev/null +++ b/lib/fog/openstack/requests/compute/rebuild_server.rb @@ -0,0 +1,31 @@ +module Fog + module Compute + class OpenStack + class Real + + def rebuild_server(server_id, image_ref, name, admin_pass=nil, metadata=nil, personality=nil) + + body = { 'rebuild' => { + 'imageRef' => image_ref, + 'name' => name + }} + body['rebuild']['adminPass'] = admin_pass if admin_pass + body['rebuild']['metadata'] = metadata if metadata + body['rebuild']['personality'] = personality if personality + server_action(server_id, body, 202) + end + + end + + class Mock + + def rebuild_server(server_id, image_ref, name, admin_pass=nil, metadata=nil, personality=nil) + response = get_server_details(server_id) + response.body['server']['status'] = "REBUILD" + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/resize_server.rb b/lib/fog/openstack/requests/compute/resize_server.rb new file mode 100644 index 0000000000..bb14b6dab0 --- /dev/null +++ b/lib/fog/openstack/requests/compute/resize_server.rb @@ -0,0 +1,24 @@ +module Fog + module Compute + class OpenStack + class Real + + def resize_server(server_id, flavor_ref) + body = { 'resize' => { 'flavorRef' => flavor_ref }} + server_action(server_id, body) + end + + end + + class Mock + + def resize_server(server_id, flavor_ref) + response = Excon::Response.new + response.status = 202 + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/revert_resized_server.rb b/lib/fog/openstack/requests/compute/revert_resized_server.rb new file mode 100644 index 0000000000..6b6a7020e7 --- /dev/null +++ b/lib/fog/openstack/requests/compute/revert_resized_server.rb @@ -0,0 +1,30 @@ +module Fog + module Compute + class OpenStack + class Real + + def revert_resized_server(server_id) + body = { 'revertResize' => nil } + server_action(server_id, body) + end + + end + + class Mock + + def revert_resized_server(server_id) + response = Excon::Response.new + response.status = 202 + + self.data[:servers][server_id]['flavorId'] = self.data[:servers][server_id]['old_flavorId'] + self.data[:servers][server_id].delete('old_flavorId') + self.data[:last_modified][:servers][server_id] = Time.now + self.data[:servers][server_id]['status'] = 'ACTIVE' + + response + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/server_action.rb b/lib/fog/openstack/requests/compute/server_action.rb new file mode 100644 index 0000000000..0da8e403b9 --- /dev/null +++ b/lib/fog/openstack/requests/compute/server_action.rb @@ -0,0 +1,18 @@ +module Fog + module Compute + class OpenStack + class Real + + def server_action(server_id, body, expects=202) + request( + :body => MultiJson.encode(body), + :expects => expects, + :method => 'POST', + :path => "servers/#{server_id}/action.json" + ) + end + + end + end + end +end diff --git a/lib/fog/openstack/requests/compute/set_metadata.rb b/lib/fog/openstack/requests/compute/set_metadata.rb new file mode 100644 index 0000000000..45c61ae833 --- /dev/null +++ b/lib/fog/openstack/requests/compute/set_metadata.rb @@ -0,0 +1,45 @@ +module Fog + module Compute + class OpenStack + + class Real + + def set_metadata(collection_name, parent_id, metadata = {}) + request( + :body => MultiJson.encode({ 'metadata' => metadata }), + :expects => 200, + :method => 'PUT', + :path => "#{collection_name}/#{parent_id}/metadata" + ) + end + + end + + class Mock + + def 
set_metadata(collection_name, parent_id, metadata = {}) + + if collection_name == "images" then + if not list_images_detail.body['images'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + if collection_name == "servers" then + if not list_servers_detail.body['servers'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + response = Excon::Response.new + response.body = { "metadata" => metadata } + response.status = 200 + response + + end + + end + + end + end +end diff --git a/lib/fog/openstack/requests/compute/update_meta.rb b/lib/fog/openstack/requests/compute/update_meta.rb new file mode 100644 index 0000000000..074c95ac54 --- /dev/null +++ b/lib/fog/openstack/requests/compute/update_meta.rb @@ -0,0 +1,45 @@ +module Fog + module Compute + class OpenStack + + class Real + + def update_meta(collection_name, parent_id, key, value) + request( + :body => MultiJson.encode({ 'meta' => { key => value }}), + :expects => 200, + :method => 'PUT', + :path => "#{collection_name}/#{parent_id}/metadata/#{key}" + ) + end + + end + + class Mock + + def update_meta(collection_name, parent_id, key, value) + + if collection_name == "images" then + if not list_images_detail.body['images'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + if collection_name == "servers" then + if not list_servers_detail.body['servers'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + response = Excon::Response.new + response.body = { "meta" => { key => value } } + response.status = 200 + response + + end + + end + + end + end +end diff --git a/lib/fog/openstack/requests/compute/update_metadata.rb b/lib/fog/openstack/requests/compute/update_metadata.rb new file mode 100644 index 0000000000..b05a3eaacd --- /dev/null +++ b/lib/fog/openstack/requests/compute/update_metadata.rb @@ -0,0 +1,46 @@ +module Fog + module Compute + class OpenStack + + class Real + + def update_metadata(collection_name, parent_id, metadata = {}) + request( + :body => MultiJson.encode({ 'metadata' => metadata }), + :expects => 200, + :method => 'POST', + :path => "#{collection_name}/#{parent_id}/metadata.json" + ) + end + + end + + class Mock + + def update_metadata(collection_name, parent_id, metadata = {}) + + if collection_name == "images" then + if not list_images_detail.body['images'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + if collection_name == "servers" then + if not list_servers_detail.body['servers'].detect {|_| _['id'] == parent_id} + raise Fog::Compute::OpenStack::NotFound + end + end + + #FIXME join w/ existing metadata here + response = Excon::Response.new + response.body = { "metadata" => metadata } + response.status = 200 + response + + end + + end + + end + end +end diff --git a/lib/fog/openstack/requests/compute/update_server.rb b/lib/fog/openstack/requests/compute/update_server.rb new file mode 100644 index 0000000000..ea9c1c62e9 --- /dev/null +++ b/lib/fog/openstack/requests/compute/update_server.rb @@ -0,0 +1,35 @@ +module Fog + module Compute + class OpenStack + class Real + + def update_server(server_id, options = {}) + request( + :body => MultiJson.encode({ 'server' => options }), + :expects => 200, + :method => 'PUT', + :path => "servers/#{server_id}.json" + ) + end + + end + + class Mock + + def update_server(server_id, options) + response = Excon::Response.new + if server = 
list_servers_detail.body['servers'].detect {|_| _['id'] == server_id} + if options['name'] + server['name'] = options['name'] + end + response.status = 200 + response + else + raise Fog::Compute::OpenStack::NotFound + end + end + + end + end + end +end diff --git a/lib/fog/providers.rb b/lib/fog/providers.rb index 2bdfe236fd..212e450a0d 100644 --- a/lib/fog/providers.rb +++ b/lib/fog/providers.rb @@ -14,10 +14,12 @@ require 'fog/new_servers' require 'fog/ninefold' require 'fog/rackspace' +require 'fog/openstack' require 'fog/slicehost' require 'fog/storm_on_demand' require 'fog/vcloud' require 'fog/virtual_box' +require 'fog/vmfusion' require 'fog/vsphere' require 'fog/voxel' require 'fog/zerigo' diff --git a/lib/fog/rackspace.rb b/lib/fog/rackspace.rb index 33ebe9a22a..5543e8287b 100644 --- a/lib/fog/rackspace.rb +++ b/lib/fog/rackspace.rb @@ -42,11 +42,11 @@ def self.slurp(error) end end - service(:cdn, 'rackspace/cdn') - service(:compute, 'rackspace/compute') - service(:dns, 'rackspace/dns') - service(:storage, 'rackspace/storage') - service(:load_balancers, 'rackspace/load_balancers') + service(:cdn, 'rackspace/cdn', 'CDN') + service(:compute, 'rackspace/compute', 'Compute') + service(:dns, 'rackspace/dns', 'DNS') + service(:storage, 'rackspace/storage', 'Storage') + service(:load_balancers, 'rackspace/load_balancers', 'LoadBalancers') def self.authenticate(options, connection_options = {}) rackspace_auth_url = options[:rackspace_auth_url] || "auth.api.rackspacecloud.com" @@ -70,5 +70,12 @@ def self.authenticate(options, connection_options = {}) !['X-Server-Management-Url', 'X-Storage-Url', 'X-CDN-Management-Url', 'X-Auth-Token'].include?(key) end end + + # CGI.escape, but without special treatment on spaces + def self.escape(str,extra_exclude_chars = '') + str.gsub(/([^a-zA-Z0-9_.-#{extra_exclude_chars}]+)/) do + '%' + $1.unpack('H2' * $1.bytesize).join('%').upcase + end + end end end diff --git a/lib/fog/rackspace/dns.rb b/lib/fog/rackspace/dns.rb index 3172cbdbf0..9172d0d0d0 100644 --- a/lib/fog/rackspace/dns.rb +++ b/lib/fog/rackspace/dns.rb @@ -36,6 +36,31 @@ class Rackspace < Fog::Service request :add_records class Mock + + def initialize(options={}) + @rackspace_api_key = options[:rackspace_api_key] + @rackspace_username = options[:rackspace_username] + @rackspace_auth_url = options[:rackspace_auth_url] + @connection_options = options[:connection_options] || {} + end + + def self.data + @data ||= { + } + end + + def self.reset + @data = nil + end + + def data + self.class.data + end + + def reset_data + self.class.reset + end + end class Real diff --git a/lib/fog/rackspace/models/dns/record.rb b/lib/fog/rackspace/models/dns/record.rb index 0fa54b10eb..2165561092 100644 --- a/lib/fog/rackspace/models/dns/record.rb +++ b/lib/fog/rackspace/models/dns/record.rb @@ -51,7 +51,7 @@ def create } response = wait_for_job connection.add_records(@zone.identity, [options]).body['jobId'] - merge_attributes(response.body['records'].first) + merge_attributes(response.body['request']['records'].select {|record| record['name'] == self.name && record['type'] == self.type && record['value'] == self.value}) true end diff --git a/lib/fog/rackspace/models/dns/zone.rb b/lib/fog/rackspace/models/dns/zone.rb index 193b927a5f..2b71e3dded 100644 --- a/lib/fog/rackspace/models/dns/zone.rb +++ b/lib/fog/rackspace/models/dns/zone.rb @@ -52,7 +52,7 @@ def create response = connection.create_domains([data]) response = wait_for_job response.body['jobId'] - merge_attributes(response.body['domains'].first) + 
merge_attributes(response.body['request']['domains'].select {|domain| domain['name'] == self.domain}) end def update diff --git a/lib/fog/rackspace/requests/storage/rackspace/directories.rb b/lib/fog/rackspace/models/storage/directories.rb similarity index 100% rename from lib/fog/rackspace/requests/storage/rackspace/directories.rb rename to lib/fog/rackspace/models/storage/directories.rb diff --git a/lib/fog/rackspace/requests/storage/rackspace/directory.rb b/lib/fog/rackspace/models/storage/directory.rb similarity index 99% rename from lib/fog/rackspace/requests/storage/rackspace/directory.rb rename to lib/fog/rackspace/models/storage/directory.rb index 2f1ea1fd9e..5f921152f5 100644 --- a/lib/fog/rackspace/requests/storage/rackspace/directory.rb +++ b/lib/fog/rackspace/models/storage/directory.rb @@ -68,7 +68,7 @@ def save end true end - + end end diff --git a/lib/fog/rackspace/requests/storage/rackspace/file.rb b/lib/fog/rackspace/models/storage/file.rb similarity index 100% rename from lib/fog/rackspace/requests/storage/rackspace/file.rb rename to lib/fog/rackspace/models/storage/file.rb diff --git a/lib/fog/rackspace/requests/storage/rackspace/files.rb b/lib/fog/rackspace/models/storage/files.rb similarity index 96% rename from lib/fog/rackspace/requests/storage/rackspace/files.rb rename to lib/fog/rackspace/models/storage/files.rb index 65b93413d3..10cfa1a022 100644 --- a/lib/fog/rackspace/requests/storage/rackspace/files.rb +++ b/lib/fog/rackspace/models/storage/files.rb @@ -67,7 +67,7 @@ def get(key, &block) def get_url(key) requires :directory if self.directory.public_url - "#{self.directory.public_url}/#{key}" + "#{self.directory.public_url}/#{Fog::Rackspace.escape(key, '/')}" end end diff --git a/lib/fog/rackspace/requests/dns/callback.rb b/lib/fog/rackspace/requests/dns/callback.rb index 7abf4f4931..d9107d5f5d 100644 --- a/lib/fog/rackspace/requests/dns/callback.rb +++ b/lib/fog/rackspace/requests/dns/callback.rb @@ -2,14 +2,15 @@ module Fog module DNS class Rackspace class Real - def callback(job_id) + def callback(job_id, show_details=true) validate_path_fragment :job_id, job_id request( :expects => [200, 202, 204], :method => 'GET', - :path => "status/#{job_id}" + :path => "status/#{job_id}", + :query => "showDetails=#{show_details}" ) end end diff --git a/lib/fog/rackspace/requests/storage/delete_container.rb b/lib/fog/rackspace/requests/storage/delete_container.rb index b6bf2d8df5..285a437a90 100644 --- a/lib/fog/rackspace/requests/storage/delete_container.rb +++ b/lib/fog/rackspace/requests/storage/delete_container.rb @@ -12,7 +12,7 @@ def delete_container(name) request( :expects => 204, :method => 'DELETE', - :path => URI.escape(name) + :path => Fog::Rackspace.escape(name) ) end diff --git a/lib/fog/rackspace/requests/storage/delete_object.rb b/lib/fog/rackspace/requests/storage/delete_object.rb index e24186020c..ddf42f4f9e 100644 --- a/lib/fog/rackspace/requests/storage/delete_object.rb +++ b/lib/fog/rackspace/requests/storage/delete_object.rb @@ -13,7 +13,7 @@ def delete_object(container, object) request( :expects => 204, :method => 'DELETE', - :path => "#{URI.escape(container)}/#{URI.escape(object)}" + :path => "#{Fog::Rackspace.escape(container)}/#{Fog::Rackspace.escape(object)}" ) end diff --git a/lib/fog/rackspace/requests/storage/get_container.rb b/lib/fog/rackspace/requests/storage/get_container.rb index 800e376d4a..dbed0b3342 100644 --- a/lib/fog/rackspace/requests/storage/get_container.rb +++ b/lib/fog/rackspace/requests/storage/get_container.rb @@ -33,7 +33,7 
@@ def get_container(container, options = {}) request( :expects => 200, :method => 'GET', - :path => URI.escape(container), + :path => Fog::Rackspace.escape(container), :query => {'format' => 'json'}.merge!(options) ) end diff --git a/lib/fog/rackspace/requests/storage/get_object.rb b/lib/fog/rackspace/requests/storage/get_object.rb index 491f706efe..3cfb0c31a7 100644 --- a/lib/fog/rackspace/requests/storage/get_object.rb +++ b/lib/fog/rackspace/requests/storage/get_object.rb @@ -14,7 +14,7 @@ def get_object(container, object, &block) :block => block, :expects => 200, :method => 'GET', - :path => "#{URI.escape(container)}/#{URI.escape(object)}" + :path => "#{Fog::Rackspace.escape(container)}/#{Fog::Rackspace.escape(object)}" }, false, &block) end diff --git a/lib/fog/rackspace/requests/storage/head_container.rb b/lib/fog/rackspace/requests/storage/head_container.rb index 0ae8ef12ab..3538daee92 100644 --- a/lib/fog/rackspace/requests/storage/head_container.rb +++ b/lib/fog/rackspace/requests/storage/head_container.rb @@ -17,7 +17,7 @@ def head_container(container) request( :expects => 204, :method => 'HEAD', - :path => URI.escape(container), + :path => Fog::Rackspace.escape(container), :query => {'format' => 'json'} ) end diff --git a/lib/fog/rackspace/requests/storage/head_object.rb b/lib/fog/rackspace/requests/storage/head_object.rb index f97823c428..baee26d9be 100644 --- a/lib/fog/rackspace/requests/storage/head_object.rb +++ b/lib/fog/rackspace/requests/storage/head_object.rb @@ -13,7 +13,7 @@ def head_object(container, object) request({ :expects => 200, :method => 'HEAD', - :path => "#{URI.escape(container)}/#{URI.escape(object)}" + :path => "#{Fog::Rackspace.escape(container)}/#{Fog::Rackspace.escape(object)}" }, false) end diff --git a/lib/fog/rackspace/requests/storage/put_container.rb b/lib/fog/rackspace/requests/storage/put_container.rb index 5ad00e6e9b..32adc8003c 100644 --- a/lib/fog/rackspace/requests/storage/put_container.rb +++ b/lib/fog/rackspace/requests/storage/put_container.rb @@ -12,7 +12,7 @@ def put_container(name) request( :expects => [201, 202], :method => 'PUT', - :path => URI.escape(name) + :path => Fog::Rackspace.escape(name) ) end diff --git a/lib/fog/rackspace/requests/storage/put_object.rb b/lib/fog/rackspace/requests/storage/put_object.rb index 3a7cf66a99..2f9816bc5c 100644 --- a/lib/fog/rackspace/requests/storage/put_object.rb +++ b/lib/fog/rackspace/requests/storage/put_object.rb @@ -19,7 +19,7 @@ def put_object(container, object, data, options = {}) :expects => 201, :headers => headers, :method => 'PUT', - :path => "#{URI.escape(container)}/#{URI.escape(object)}" + :path => "#{Fog::Rackspace.escape(container)}/#{Fog::Rackspace.escape(object)}" ) end diff --git a/lib/fog/rackspace/requests/storage/put_object_manifest.rb b/lib/fog/rackspace/requests/storage/put_object_manifest.rb index 444146c6fd..fc01278409 100644 --- a/lib/fog/rackspace/requests/storage/put_object_manifest.rb +++ b/lib/fog/rackspace/requests/storage/put_object_manifest.rb @@ -10,7 +10,7 @@ class Real # * object<~String> - Name for object # def put_object_manifest(container, object) - path = "#{URI.escape(container)}/#{URI.escape(object)}" + path = "#{Fog::Rackspace.escape(container)}/#{Fog::Rackspace.escape(object)}" request( :expects => 201, :headers => {'X-Object-Manifest' => path}, diff --git a/lib/fog/slicehost.rb b/lib/fog/slicehost.rb index cc702c6b97..3a004c748f 100644 --- a/lib/fog/slicehost.rb +++ b/lib/fog/slicehost.rb @@ -5,8 +5,8 @@ module Slicehost extend Fog::Provider - 
service(:compute, 'slicehost/compute') - service(:dns, 'slicehost/dns') + service(:compute, 'slicehost/compute', 'Compute') + service(:dns, 'slicehost/dns', 'DNS') end end diff --git a/lib/fog/slicehost/parsers/dns/slicehost/create_record.rb b/lib/fog/slicehost/requests/dns/create_record.rb similarity index 96% rename from lib/fog/slicehost/parsers/dns/slicehost/create_record.rb rename to lib/fog/slicehost/requests/dns/create_record.rb index 5c0a054e14..b8032e739d 100644 --- a/lib/fog/slicehost/parsers/dns/slicehost/create_record.rb +++ b/lib/fog/slicehost/requests/dns/create_record.rb @@ -9,7 +9,7 @@ class Real # ==== Parameters # * record_type<~String> - type of DNS record to create (A, CNAME, etc) # * zone_id<~Integer> - ID of the zone to update - # * name<~String> - host name this DNS record is for + # * name<~String> - host name this DNS record is for # * data<~String> - data for the DNS record (ie for an A record, the IP address) # * options<~Hash> - extra parameters that are not mandatory # * ttl<~Integer> - time to live in seconds @@ -36,7 +36,7 @@ def create_record(record_type, zone_id, name, data, options = {}) optional_tags+= "#{value}" end } - + request( :body => %Q{#{record_type}#{zone_id}#{name}#{data}#{optional_tags}}, :expects => 201, diff --git a/lib/fog/slicehost/parsers/dns/slicehost/create_zone.rb b/lib/fog/slicehost/requests/dns/create_zone.rb similarity index 99% rename from lib/fog/slicehost/parsers/dns/slicehost/create_zone.rb rename to lib/fog/slicehost/requests/dns/create_zone.rb index 2a63e005af..d67f4a0f0b 100644 --- a/lib/fog/slicehost/parsers/dns/slicehost/create_zone.rb +++ b/lib/fog/slicehost/requests/dns/create_zone.rb @@ -30,7 +30,7 @@ def create_zone(origin, options = {}) optional_tags+= "#{value}" end } - + request( :body => %Q{#{origin}#{optional_tags}}, :expects => 201, diff --git a/lib/fog/slicehost/parsers/dns/slicehost/delete_record.rb b/lib/fog/slicehost/requests/dns/delete_record.rb similarity index 100% rename from lib/fog/slicehost/parsers/dns/slicehost/delete_record.rb rename to lib/fog/slicehost/requests/dns/delete_record.rb diff --git a/lib/fog/slicehost/parsers/dns/slicehost/delete_zone.rb b/lib/fog/slicehost/requests/dns/delete_zone.rb similarity index 100% rename from lib/fog/slicehost/parsers/dns/slicehost/delete_zone.rb rename to lib/fog/slicehost/requests/dns/delete_zone.rb diff --git a/lib/fog/slicehost/parsers/dns/slicehost/get_record.rb b/lib/fog/slicehost/requests/dns/get_record.rb similarity index 93% rename from lib/fog/slicehost/parsers/dns/slicehost/get_record.rb rename to lib/fog/slicehost/requests/dns/get_record.rb index 94ed477375..fada77ff17 100644 --- a/lib/fog/slicehost/parsers/dns/slicehost/get_record.rb +++ b/lib/fog/slicehost/requests/dns/get_record.rb @@ -5,14 +5,14 @@ class Real require 'fog/slicehost/parsers/dns/get_record' - # Get an individual DNS record from the specified zone + # Get an individual DNS record from the specified zone # # ==== Returns # * response<~Excon::Response>: # * body<~Hash>: # * 'record_type'<~String> - type of DNS record to create (A, CNAME, etc) # * 'zone_id'<~Integer> - ID of the zone to update - # * 'name'<~String> - host name this DNS record is for + # * 'name'<~String> - host name this DNS record is for # * 'data'<~String> - data for the DNS record (ie for an A record, the IP address) # * 'ttl'<~Integer> - time to live in seconds # * 'active'<~String> - whether this record is active or not ('Y' or 'N') diff --git a/lib/fog/slicehost/parsers/dns/slicehost/get_records.rb 
b/lib/fog/slicehost/requests/dns/get_records.rb similarity index 98% rename from lib/fog/slicehost/parsers/dns/slicehost/get_records.rb rename to lib/fog/slicehost/requests/dns/get_records.rb index 0246ff7eca..b56c6687e1 100644 --- a/lib/fog/slicehost/parsers/dns/slicehost/get_records.rb +++ b/lib/fog/slicehost/requests/dns/get_records.rb @@ -5,7 +5,7 @@ class Real require 'fog/slicehost/parsers/dns/get_records' - # Get all the DNS records across all the DNS zones for this account + # Get all the DNS records across all the DNS zones for this account # # ==== Returns # * response<~Excon::Response>: diff --git a/lib/fog/slicehost/parsers/dns/slicehost/get_zone.rb b/lib/fog/slicehost/requests/dns/get_zone.rb similarity index 100% rename from lib/fog/slicehost/parsers/dns/slicehost/get_zone.rb rename to lib/fog/slicehost/requests/dns/get_zone.rb diff --git a/lib/fog/slicehost/parsers/dns/slicehost/get_zones.rb b/lib/fog/slicehost/requests/dns/get_zones.rb similarity index 100% rename from lib/fog/slicehost/parsers/dns/slicehost/get_zones.rb rename to lib/fog/slicehost/requests/dns/get_zones.rb diff --git a/lib/fog/storm_on_demand.rb b/lib/fog/storm_on_demand.rb index 1e5d36d28d..c6563b378f 100644 --- a/lib/fog/storm_on_demand.rb +++ b/lib/fog/storm_on_demand.rb @@ -5,7 +5,7 @@ module StormOnDemand extend Fog::Provider - service(:compute, 'storm_on_demand/compute') + service(:compute, 'storm_on_demand/compute', 'Compute') end end diff --git a/lib/fog/terremark/vcloud.rb b/lib/fog/terremark/vcloud.rb index de3c3bee02..fef802e2db 100644 --- a/lib/fog/terremark/vcloud.rb +++ b/lib/fog/terremark/vcloud.rb @@ -15,7 +15,7 @@ module Defaults extend Fog::Terremark::Shared def self.new(options={}) - Fog::Logger.warning("Fog::Terremark::Vcloud is deprecated, to be replaced with Vcloud 1.0 someday/maybe [light_black](#{caller.first})[/]") + Fog::Logger.deprecation("Fog::Terremark::Vcloud is deprecated, to be replaced with Vcloud 1.0 someday/maybe [light_black](#{caller.first})[/]") unless @required shared_requires diff --git a/lib/fog/vcloud.rb b/lib/fog/vcloud.rb index 323fd5fbc6..c9c5ad4a09 100644 --- a/lib/fog/vcloud.rb +++ b/lib/fog/vcloud.rb @@ -5,7 +5,7 @@ module Vcloud extend Fog::Provider - service(:compute, 'vcloud/compute') + service(:compute, 'vcloud/compute', 'Compute') end end diff --git a/lib/fog/virtual_box.rb b/lib/fog/virtual_box.rb index 03d8efec2e..a806503e90 100644 --- a/lib/fog/virtual_box.rb +++ b/lib/fog/virtual_box.rb @@ -5,7 +5,7 @@ module VirtualBox extend Fog::Provider - service(:compute, 'virtual_box/compute') + service(:compute, 'virtual_box/compute', 'Compute') end end diff --git a/lib/fog/vmfusion.rb b/lib/fog/vmfusion.rb new file mode 100644 index 0000000000..4372c21ed1 --- /dev/null +++ b/lib/fog/vmfusion.rb @@ -0,0 +1,11 @@ +require(File.expand_path(File.join(File.dirname(__FILE__), 'core'))) + +module Fog + module Vmfusion + + extend Fog::Provider + + service(:compute, 'vmfusion/compute', 'Compute') + + end +end diff --git a/lib/fog/vmfusion/compute.rb b/lib/fog/vmfusion/compute.rb new file mode 100644 index 0000000000..d48a24c141 --- /dev/null +++ b/lib/fog/vmfusion/compute.rb @@ -0,0 +1,29 @@ +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'vmfusion')) +require 'fog/compute' + +module Fog + module Compute + class Vmfusion < Fog::Service + + model_path 'fog/vmfusion/models/compute' + model :server + collection :servers + + class Mock + + def initialize(options={}) + Fog::Mock.not_implemented + end + + end + + class Real + + def initialize(options={}) + 
require 'fission' + end + + end + end + end +end diff --git a/lib/fog/vmfusion/models/compute/server.rb b/lib/fog/vmfusion/models/compute/server.rb new file mode 100644 index 0000000000..561178ff85 --- /dev/null +++ b/lib/fog/vmfusion/models/compute/server.rb @@ -0,0 +1,223 @@ +require 'fog/core/model' + +module Fog + module Compute + class Vmfusion + + class Server < Fog::Model + + identity :name + + attribute :name + attribute :state + + attr_accessor :password + attr_writer :private_key, :private_key_path, :public_key, :public_key_path, :username + + def initialize(attributes={}) + super + end + + def save + raise Fog::Errors::Error.new('Creating a new vm is not yet supported') + end + + def clone(name) + requires :raw + + ::Fission::VM.clone(@raw.name,name) + return connection.servers.get(name) + end + + def destroy(options={ :force => false}) + requires :raw + + if state=="running" + if options[:force] + @raw.stop + end + end + + ::Fission::VM.delete @raw.name + end + + def start + requires :raw + + unless state=="running" + @raw.start + return true + else + return false + end + end + + def stop + requires :raw + + if state=="running" + @raw.stop + return true + else + return false + end + end + + def reboot + requires :raw + if state=="running" + @raw.stop + wait_for { state!="running"} + @raw.start + return true + else + return false + end + end + + def halt + requires :raw + if state=="running" + @raw.halt + return true + else + return false + end + + end + + def poweroff + requires :raw + halt + end + + def shutdown + requires :raw + stop + end + + def resume + requires :raw + @raw.resume + end + + def suspend + requires :raw + @raw.suspend + end + + def state + requires :raw + @raw.state + end + + def ready? + state == "running" + end + + def private_ip_address + ip_address(:private) + end + + def public_ip_address + ip_address(:public) + end + + + def username + @username ||= 'root' + end + + def ssh(commands) + requires :public_ip_address, :username + + #requires :password, :private_key + ssh_options={} + ssh_options[:password] = password unless password.nil? + ssh_options[:key_data] = [private_key] if private_key + + Fog::SSH.new(public_ip_address, @username, ssh_options).run(commands) + + end + + def scp(local_path, remote_path, upload_options = {}) + requires :public_ip_address, :username + + scp_options = {} + scp_options[:password] = password unless self.password.nil? + scp_options[:key_data] = [private_key] if self.private_key + + Fog::SCP.new(public_ip_address, username, scp_options).upload(local_path, remote_path, upload_options) + end + + # Sets up a new key + def setup(credentials = {}) + requires :public_key, :public_ip_address, :username + + credentials[:password] = password unless self.password.nil? 
+ credentails[:key_data] = [private_key] if self.private_key + + commands = [ + %{mkdir .ssh}, + ] + if public_key + commands << %{echo "#{public_key}" >> ~/.ssh/authorized_keys} + end + + # wait for domain to be ready + Timeout::timeout(360) do + begin + Timeout::timeout(8) do + Fog::SSH.new(public_ip_address, username, credentials.merge(:timeout => 4)).run('pwd') + end + rescue Errno::ECONNREFUSED + sleep(2) + retry + rescue Net::SSH::AuthenticationFailed, Timeout::Error + retry + end + end + Fog::SSH.new(public_ip_address, username, credentials).run(commands) + end + + def private_key_path + @private_key_path ||= Fog.credentials[:private_key_path] + @private_key_path &&= File.expand_path(@private_key_path) + end + + def private_key + @private_key ||= private_key_path && File.read(private_key_path) + end + + def public_key_path + @public_key_path ||= Fog.credentials[:public_key_path] + @public_key_path &&= File.expand_path(@public_key_path) + end + + def public_key + @public_key ||= public_key_path && File.read(public_key_path) + end + + private + def ip_address(key) + @raw.ip_address + end + + def raw + @raw + end + + def raw=(new_raw) + @raw = new_raw + + raw_attributes = { + :name => new_raw.name, + :state => new_raw.state + } + + merge_attributes(raw_attributes) + end + + end + end + end +end diff --git a/lib/fog/vmfusion/models/compute/servers.rb b/lib/fog/vmfusion/models/compute/servers.rb new file mode 100644 index 0000000000..a6f2c3306f --- /dev/null +++ b/lib/fog/vmfusion/models/compute/servers.rb @@ -0,0 +1,37 @@ +require 'fog/core/collection' +require 'fog/vmfusion/models/compute/server' + +module Fog + module Compute + class Vmfusion + + class Servers < Fog::Collection + + model Fog::Compute::Vmfusion::Server + + def all(filter=nil) + + data=[] + + filter={} if filter.nil? 
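The vmfusion provider being added here wraps the fission gem around VMware Fusion machines registered on the local workstation. A minimal usage sketch, assuming the fission gem is installed and a Fusion VM named 'fog-test' already exists (the name, username, and command are illustrative, not part of the diff):

    require 'fog'

    compute = Fog::Compute.new(:provider => 'vmfusion')   # backed by the fission gem

    server = compute.servers.get('fog-test')              # look up an existing Fusion VM by name
    server.start unless server.ready?
    server.username = 'fog'                               # defaults to 'root' when unset
    puts server.ssh(['uptime']).map(&:stdout)             # runs over Fog::SSH against the VM's ip address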
+ unless filter.has_key?(:name) + vms=::Fission::VM.all + vms.each do |vm| + data << { :raw => vm} + end + else + data << { :raw => ::Fission::VM.new(filter[:name])} + end + + load(data) + + end + + def get(name) + self.all(:name =>name).first + end + + end + end + end +end diff --git a/lib/fog/voxel.rb b/lib/fog/voxel.rb index 0f852b9fd4..2ff744da70 100644 --- a/lib/fog/voxel.rb +++ b/lib/fog/voxel.rb @@ -6,7 +6,7 @@ module Voxel extend Fog::Provider - service(:compute, 'voxel/compute') + service(:compute, 'voxel/compute', 'Compute') def self.create_signature(secret, options) to_sign = options.keys.map { |k| k.to_s }.sort.map { |k| "#{k}#{options[k.to_sym]}" }.join("") diff --git a/lib/fog/vsphere.rb b/lib/fog/vsphere.rb index 104e157400..2d8fd32a06 100644 --- a/lib/fog/vsphere.rb +++ b/lib/fog/vsphere.rb @@ -11,7 +11,7 @@ class SecurityError < ServiceError; end class NotFound < ServiceError; end end - service(:compute, 'vsphere/compute') + service(:compute, 'vsphere/compute', 'Compute') end end diff --git a/lib/fog/vsphere/compute.rb b/lib/fog/vsphere/compute.rb index 41633e5bec..a2e85a9167 100644 --- a/lib/fog/vsphere/compute.rb +++ b/lib/fog/vsphere/compute.rb @@ -26,6 +26,24 @@ module Shared attr_reader :vsphere_is_vcenter attr_reader :vsphere_rev + attr_reader :vsphere_server + attr_reader :vsphere_username + + ATTR_TO_PROP = { + :id => 'config.instanceUuid', + :name => 'name', + :uuid => 'config.uuid', + :instance_uuid => 'config.instanceUuid', + :hostname => 'summary.guest.hostName', + :operatingsystem => 'summary.guest.guestFullName', + :ipaddress => 'guest.ipAddress', + :power_state => 'runtime.powerState', + :connection_state => 'runtime.connectionState', + :host => 'runtime.host', + :tools_state => 'guest.toolsStatus', + :tools_version => 'guest.toolsVersionStatus', + :is_a_template => 'config.template', + } # Utility method to convert a VMware managed object into an attribute hash. # This should only really be necessary for the real class. @@ -33,30 +51,14 @@ module Shared # in order to massage VMware Managed Object References into Attribute Hashes. def convert_vm_mob_ref_to_attr_hash(vm_mob_ref) return nil unless vm_mob_ref - # A cloning VM doesn't have a configuration yet. Unfortuantely we just get - # a RunTime exception. - begin - is_ready = vm_mob_ref.config ? true : false - rescue RuntimeError - is_ready = nil + + props = vm_mob_ref.collect! *ATTR_TO_PROP.values.uniq + Hash[ATTR_TO_PROP.map { |k,v| [k.to_s, props[v]] }].tap do |attrs| + attrs['id'] ||= vm_mob_ref._ref + attrs['mo_ref'] = vm_mob_ref._ref + attrs['hypervisor'] = attrs['host'].name + attrs['mac_addresses'] = vm_mob_ref.macs end - { - 'id' => is_ready ? vm_mob_ref.config.instanceUuid : vm_mob_ref._ref, - 'mo_ref' => vm_mob_ref._ref, - 'name' => vm_mob_ref.name, - 'uuid' => is_ready ? vm_mob_ref.config.uuid : nil, - 'instance_uuid' => is_ready ? vm_mob_ref.config.instanceUuid : nil, - 'hostname' => vm_mob_ref.summary.guest.hostName, - 'operatingsystem' => vm_mob_ref.summary.guest.guestFullName, - 'ipaddress' => vm_mob_ref.summary.guest.ipAddress, - 'power_state' => vm_mob_ref.runtime.powerState, - 'connection_state' => vm_mob_ref.runtime.connectionState, - 'hypervisor' => vm_mob_ref.runtime.host ? vm_mob_ref.runtime.host.name : nil, - 'tools_state' => vm_mob_ref.summary.guest.toolsStatus, - 'tools_version' => vm_mob_ref.summary.guest.toolsVersionStatus, - 'mac_addresses' => is_ready ? vm_mob_ref.macs : nil, - 'is_a_template' => is_ready ? 
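The rewritten convert_vm_mob_ref_to_attr_hash above drives one bulk property collection from the ATTR_TO_PROP map instead of reading each managed-object attribute individually. The Hash construction it relies on can be shown with plain Ruby data; the props hash below simply stands in for whatever the property collector returns:

    ATTR_TO_PROP = {
      :name        => 'name',
      :power_state => 'runtime.powerState'
    }

    # Pretend this came back from vm_mob_ref.collect! *ATTR_TO_PROP.values.uniq
    props = { 'name' => 'web01', 'runtime.powerState' => 'poweredOn' }

    attrs = Hash[ATTR_TO_PROP.map { |k, v| [k.to_s, props[v]] }]
    # => { "name" => "web01", "power_state" => "poweredOn" }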
vm_mob_ref.config.template : nil - } end end @@ -66,14 +68,12 @@ class Mock include Shared def initialize(options={}) - require 'mocha' @vsphere_username = options[:vsphere_username] @vsphere_password = 'REDACTED' @vsphere_server = options[:vsphere_server] @vsphere_expected_pubkey_hash = options[:vsphere_expected_pubkey_hash] @vsphere_is_vcenter = true @vsphere_rev = '4.0' - @connection = Mocha::Mock.new end end @@ -125,6 +125,7 @@ def initialize(options={}) end @vsphere_is_vcenter = @connection.serviceContent.about.apiType == "VirtualCenter" + @vsphere_rev = @connection.rev authenticate end diff --git a/lib/fog/vsphere/requests/compute/vm_clone.rb b/lib/fog/vsphere/requests/compute/vm_clone.rb index 2a124051cc..3b68b43b25 100644 --- a/lib/fog/vsphere/requests/compute/vm_clone.rb +++ b/lib/fog/vsphere/requests/compute/vm_clone.rb @@ -25,9 +25,11 @@ def vm_clone(options = {}) # Option handling options = vm_clone_check_options(options) + notfound = lambda { raise Fog::Compute::Vsphere::NotFound, "Could not find VM template" } + # REVISIT: This will have horrible performance for large sites. # Find the Managed Object reference of the template VM (Wish I could do this with the API) - vm_mob_ref = list_all_virtual_machine_mobs.find do |vm| + vm_mob_ref = list_all_virtual_machine_mobs.find(notfound) do |vm| convert_vm_mob_ref_to_attr_hash(vm)['instance_uuid'] == options['instance_uuid'] end @@ -79,6 +81,10 @@ class Mock def vm_clone(options = {}) # Option handling options = vm_clone_check_options(options) + notfound = lambda { raise Fog::Compute::Vsphere::NotFound, "Could not find VM template" } + vm_mob_ref = list_virtual_machines['virtual_machines'].find(notfound) do |vm| + vm['instance_uuid'] == options['instance_uuid'] + end { 'vm_ref' => 'vm-123', 'task_ref' => 'task-1234' diff --git a/lib/fog/zerigo.rb b/lib/fog/zerigo.rb index 78725eac8b..7acf6788e8 100644 --- a/lib/fog/zerigo.rb +++ b/lib/fog/zerigo.rb @@ -5,7 +5,7 @@ module Zerigo extend Fog::Provider - service(:dns, 'zerigo/dns') + service(:dns, 'zerigo/dns', 'DNS') end end diff --git a/lib/fog/zerigo/dns.rb b/lib/fog/zerigo/dns.rb index 5acbbcbed3..8a03465ba3 100644 --- a/lib/fog/zerigo/dns.rb +++ b/lib/fog/zerigo/dns.rb @@ -34,7 +34,7 @@ class Mock def self.data @data ||= Hash.new do |hash, key| - hash[key] = {} + hash[key] = key == :zones ?
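Both vm_clone changes above use the rarely seen ifnone argument to Enumerable#find: when no element matches, the lambda is called instead of returning nil, which turns a silent miss into a Fog::Compute::Vsphere::NotFound error. A small standalone illustration of the idiom:

    notfound = lambda { raise ArgumentError, 'no match' }

    [1, 2, 3].find(notfound) { |n| n > 2 }   # => 3
    [1, 2, 3].find(notfound) { |n| n > 9 }   # raises ArgumentError, 'no match'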
[] : {} end end @@ -48,13 +48,24 @@ def initialize(options={}) end def data - self.class.data[@zerigo_email] + self.class.data end def reset_data - self.class.data.delete(@zerigo_email) + self.class.reset end + def find_by_zone_id(zone_id) + self.data[:zones].find { |z| z['id'] == zone_id } + end + + def find_by_domain(domain) + self.data[:zones].find { |z| z['domain'] == domain } + end + + def find_host(host_id) + self.data[:zones].collect { |z| z['hosts'].find { |h| h['id'] == host_id } }.compact.first + end end class Real diff --git a/lib/fog/zerigo/models/dns/record.rb b/lib/fog/zerigo/models/dns/record.rb index 005acba893..9e7eefb124 100644 --- a/lib/fog/zerigo/models/dns/record.rb +++ b/lib/fog/zerigo/models/dns/record.rb @@ -48,7 +48,7 @@ def save connection.create_host(@zone.id, type, value, options) else options[:host_type] = type - options[:data] = data + options[:data] = value connection.update_host(identity, options) end merge_attributes(data.body) diff --git a/lib/fog/zerigo/models/dns/records.rb b/lib/fog/zerigo/models/dns/records.rb index d789129762..a5f8e58004 100644 --- a/lib/fog/zerigo/models/dns/records.rb +++ b/lib/fog/zerigo/models/dns/records.rb @@ -34,6 +34,11 @@ def new(attributes = {}) super({ :zone => zone }.merge!(attributes)) end + def find(fqdn) + hosts = connection.find_hosts(fqdn, zone.id).body['hosts'] + hosts.collect { |host| new(host) } + end + end end diff --git a/lib/fog/zerigo/models/dns/zones.rb b/lib/fog/zerigo/models/dns/zones.rb index c4b12a84e6..361db3f19b 100644 --- a/lib/fog/zerigo/models/dns/zones.rb +++ b/lib/fog/zerigo/models/dns/zones.rb @@ -14,8 +14,8 @@ def all load(data) end - def get(zone_id) - data = connection.get_zone(zone_id).body + def get(zone_id_or_domain) + data = connection.get_zone(zone_id_or_domain).body zone = new(data) zone.records.load(data['hosts']) zone diff --git a/lib/fog/zerigo/requests/dns/count_hosts.rb b/lib/fog/zerigo/requests/dns/count_hosts.rb index 5e19769de7..a39f8a3ad6 100644 --- a/lib/fog/zerigo/requests/dns/count_hosts.rb +++ b/lib/fog/zerigo/requests/dns/count_hosts.rb @@ -13,7 +13,7 @@ class Real # * body<~Hash> # * 'count'<~Integer> # * 'status'<~Integer> - 200 indicates success - def count_hosts( zone_id) + def count_hosts(zone_id) request( :expects => 200, :method => 'GET', @@ -23,6 +23,25 @@ def count_hosts( zone_id) end end + + class Mock # :nodoc:all + def count_hosts(zone_id) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + if zone + response.status = 200 + response.body = { + 'count' => zone['hosts'].size + } + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/count_zones.rb b/lib/fog/zerigo/requests/dns/count_zones.rb index b1eac98ea0..fe5a7a82c5 100644 --- a/lib/fog/zerigo/requests/dns/count_zones.rb +++ b/lib/fog/zerigo/requests/dns/count_zones.rb @@ -13,7 +13,7 @@ class Real # * body<~Hash> # * 'count'<~Integer> # * 'status'<~Integer> - 200 indicates success - def count_zones() + def count_zones request( :expects => 200, :method => 'GET', @@ -23,6 +23,17 @@ def count_zones() end end + + class Mock # :nodoc:all + def count_zones + response = Excon::Response.new + + response.status = 200 + response.body = { 'count' => self.data[:zones].size } + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/create_host.rb b/lib/fog/zerigo/requests/dns/create_host.rb index 4f3eddacda..61e73e0133 100644 --- a/lib/fog/zerigo/requests/dns/create_host.rb +++ b/lib/fog/zerigo/requests/dns/create_host.rb @@ 
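The Zerigo Mock now keeps all state in one class-level store whose default block seeds an empty zone list, and the find_by_zone_id, find_by_domain, and find_host helpers above search that list. The default-block behaviour is just this Hash idiom:

    data = Hash.new do |hash, key|
      hash[key] = key == :zones ? [] : {}
    end

    data[:zones]                                             # => [] on first access
    data[:zones] << { 'id' => 1, 'domain' => 'example.com', 'hosts' => [] }
    data[:zones].find { |z| z['domain'] == 'example.com' }   # => that zone hash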
-11,7 +11,7 @@ class Real # * zone_id<~Integer> # * host_type<~String> # * data<~String> - # * options<~Hash> - optional paramaters + # * options<~Hash> - optional parameters # * hostname<~String> - Note: normally this is set/required!! # * notes<~String> # * priority<~Integer> - Note: required for MX or SRV records @@ -57,6 +57,72 @@ def create_host(zone_id, host_type, data, options = {}) end end + + class Mock # :nodoc:all + def valid_host_types + %w[A AAAA CNAME GEO MX NS SPF SRV TXT URL PTR CNAME NS] + end + + def create_host(zone_id, host_type, data, options = {}) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + # Handle error cases. + + # Zone doesn't exist. + unless zone + response.status = 404 + return response + end + + # Bad host type. + unless valid_host_types.include?(host_type) + response.status = 422 + response.body = { + 'errors' => [ + 'error' => 'Host type is not included in the list' + ] + } + + return response + end + + # Missing or bad priority value for MX or SRV records. + if %w[MX SRV].include?(host_type) && options['priority'].to_s !~ /\d+/ + response.status = 422 + response.body = { + 'errors' => [ + 'error' => 'Priority is not a number' + ] + } + + return response + end + + # Successful case. + now = Time.now + host = { + 'id' => rand(10000000), + 'fqdn' => options[:hostname] ? "#{options[:hostname]}.#{zone['domain']}" : zone['domain'], + 'data' => data, + 'hostname' => options[:hostname], + 'ttl' => options[:ttl].to_i, + 'host-type' => host_type, + 'created-at' => now, + 'updated-at' => now, + 'notes' => options[:notes], + 'priority' => options[:priority].to_i, + 'zone-id' => zone_id + } + + zone['hosts'] << host + response.status = 201 + response.body = host + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/create_zone.rb b/lib/fog/zerigo/requests/dns/create_zone.rb index aa034cbf34..9d9f058ddf 100644 --- a/lib/fog/zerigo/requests/dns/create_zone.rb +++ b/lib/fog/zerigo/requests/dns/create_zone.rb @@ -46,7 +46,7 @@ class Real # * 'restrict-axfr'<~String> # * 'status'<~Integer> - 201 if successful - def create_zone( domain, default_ttl, ns_type, options = {}) + def create_zone(domain, default_ttl, ns_type, options = {}) optional_tags= '' options.each { |option, value| @@ -84,6 +84,49 @@ def create_zone( domain, default_ttl, ns_type, options = {}) end end + + class Mock # :nodoc:all + def create_zone(domain, default_ttl, ns_type, options = {}) + now = Time.now + zone = { + 'id' => rand(10000000), + 'domain' => domain, + 'created-at' => now, + 'updated-at' => now, + 'ns1' => options[:ns1], + 'nx-ttl' => options[:nx_ttl].to_i, + 'default-ttl' => default_ttl.to_i, + 'ns-type' => ns_type, + 'hosts' => options[:hosts] || [], + 'hosts-count' => (options[:hosts] || []).size, + 'notes' => options[:notes], + 'slave-nameservers' => options[:slave_nameservers], + 'tag-list' => options[:tag_list] + } + + response = Excon::Response.new + + if self.data[:zones].any? {|z| z['domain'] == zone['domain'] } + response.status = 422 + response.body = { + 'errors' => [ + 'error' => 'Domain is already associated to another account', + 'error' => 'Domain already exists. If it was just deleted, wait a minute and try again' + ] + } + + else + self.data[:zones] << zone + + response.status = 201 + response.headers = { 'Location' => "http://ns.zerigo.com/api/1.1/zones/#{zone['id']}" } + response.body = zone['hosts'].empty? ? zone.merge(:hosts => nil) : zone # Zerigo returns nil, not an empty list only on the create command. 
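Each of the Zerigo mock requests follows the same shape as create_host above: build an Excon::Response, branch on whether the record exists (or the input is valid), and return the status code the real API would use. Stripped of the Zerigo specifics, the skeleton is roughly this; find_record is a hypothetical lookup helper standing in for find_by_zone_id and friends:

    def some_request(id)
      record = find_record(id)          # hypothetical lookup helper
      response = Excon::Response.new
      if record
        response.status = 200
        response.body   = record
      else
        response.status = 404           # mirror the real API's error code
      end
      response
    end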
+ end + + response + end + end + end end end diff --git a/lib/fog/zerigo/requests/dns/delete_host.rb b/lib/fog/zerigo/requests/dns/delete_host.rb index ab8404dcf8..c57bbf09d8 100644 --- a/lib/fog/zerigo/requests/dns/delete_host.rb +++ b/lib/fog/zerigo/requests/dns/delete_host.rb @@ -19,6 +19,25 @@ def delete_host(host_id) end end + + class Mock # :nodoc:all + def delete_host(host_id) + host = find_host(host_id) + + response = Excon::Response.new + + if host + zone = find_by_zone_id(host['zone-id']) + zone['hosts'].delete(host) + + response.status = 200 + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/delete_zone.rb b/lib/fog/zerigo/requests/dns/delete_zone.rb index da04f09356..f6e2015b87 100644 --- a/lib/fog/zerigo/requests/dns/delete_zone.rb +++ b/lib/fog/zerigo/requests/dns/delete_zone.rb @@ -20,6 +20,23 @@ def delete_zone(zone_id) end end + + class Mock # :nodoc:all + def delete_zone(zone_id) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + if zone + self.data[:zones].delete(zone) + response.status = 200 + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/find_hosts.rb b/lib/fog/zerigo/requests/dns/find_hosts.rb index dbe3848d56..cd2f074e56 100644 --- a/lib/fog/zerigo/requests/dns/find_hosts.rb +++ b/lib/fog/zerigo/requests/dns/find_hosts.rb @@ -29,7 +29,7 @@ class Real # * 'zone-id'<~String> # * 'status'<~Integer> - 200 indicated success # - def find_hosts( fqdn, zone_id = nil) + def find_hosts(fqdn, zone_id = nil) if zone_id.nil? #look for matching host across all zones request( @@ -50,6 +50,27 @@ def find_hosts( fqdn, zone_id = nil) end end + + class Mock # :nodoc:all + def find_hosts(fqdn, zone_id = nil) + response = Excon::Response.new + + zone = find_by_zone_id(zone_id) + if zone_id && !zone + response.status = 404 + else + hosts = zone ? 
zone['hosts'].select { |z| z['fqdn'] == fqdn } : + self.data[:zones].collect { |z| z['hosts'].find { |h| h['fqdn'] == fqdn } }.compact + + response.status = 200 + response.body = { + 'hosts' => hosts + } + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/get_host.rb b/lib/fog/zerigo/requests/dns/get_host.rb index e2410c28e4..cb30c27742 100644 --- a/lib/fog/zerigo/requests/dns/get_host.rb +++ b/lib/fog/zerigo/requests/dns/get_host.rb @@ -24,7 +24,7 @@ class Real # * 'updated-at'<~String> # * 'zone-id'<~String> # * 'status'<~Integer> - 200 indicates success - def get_host( host_id) + def get_host(host_id) request( :expects => 200, :method => 'GET', @@ -34,6 +34,23 @@ def get_host( host_id) end end + + class Mock # :nodoc:all + def get_host(host_id) + host = find_host(host_id) + + response = Excon::Response.new + + if host + response.status = 200 + response.body = host + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/get_zone.rb b/lib/fog/zerigo/requests/dns/get_zone.rb index c335ca0c26..9077a3e8ac 100644 --- a/lib/fog/zerigo/requests/dns/get_zone.rb +++ b/lib/fog/zerigo/requests/dns/get_zone.rb @@ -34,16 +34,33 @@ class Real # * 'restrict-axfr'<~String> # * 'status'<~Integer> - 200 indicates success - def get_zone(zone) + def get_zone(zone_id_or_domain) request( :expects => 200, :method => 'GET', :parser => Fog::Parsers::DNS::Zerigo::GetZone.new, - :path => "/api/1.1/zones/#{zone}.xml" + :path => "/api/1.1/zones/#{zone_id_or_domain}.xml" ) end end + + class Mock # :nodoc:all + def get_zone(zone_id_or_domain) + zone = find_by_zone_id(zone_id_or_domain) || find_by_domain(zone_id_or_domain) + + response = Excon::Response.new + + if zone + response.status = 200 + response.body = zone + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/get_zone_stats.rb b/lib/fog/zerigo/requests/dns/get_zone_stats.rb index c7726f10e6..8af53f4e4c 100644 --- a/lib/fog/zerigo/requests/dns/get_zone_stats.rb +++ b/lib/fog/zerigo/requests/dns/get_zone_stats.rb @@ -31,6 +31,29 @@ def get_zone_stats(zone_id) end end + + class Mock # :nodoc:all + def get_zone_stats(zone_id) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + if zone + response.status = 200 + response.body = { + 'id' => zone, + 'domain' => zone['domain'], + 'period-begin' => zone['created-at'].strftime("%F"), + 'period-end' => Date.today.to_s, + 'queries' => 0 + } + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/list_hosts.rb b/lib/fog/zerigo/requests/dns/list_hosts.rb index 3a980bac35..1751f8b789 100644 --- a/lib/fog/zerigo/requests/dns/list_hosts.rb +++ b/lib/fog/zerigo/requests/dns/list_hosts.rb @@ -25,7 +25,7 @@ class Real # * 'updated-at'<~String> # * 'zone-id'<~String> # * 'status'<~Integer> - 200 indicates success - def list_hosts( zone_id) + def list_hosts(zone_id) request( :expects => 200, :method => 'GET', @@ -35,6 +35,25 @@ def list_hosts( zone_id) end end + + class Mock # :nodoc:all + def list_hosts(zone_id) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + if zone + response.status = 200 + response.body = { + 'hosts' => zone['hosts'] + } + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/list_zones.rb b/lib/fog/zerigo/requests/dns/list_zones.rb index b7f541bf07..ff9349693a 100644 --- 
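With these mocks in place the Zerigo DNS requests can be exercised entirely offline. A rough round-trip at the request layer, assuming Fog.mock! and that :zerigo_email / :zerigo_token are the credential options the service expects (the values below are dummies):

    Fog.mock!
    dns = Fog::DNS.new(:provider     => 'Zerigo',
                       :zerigo_email => 'user@example.com',
                       :zerigo_token => 'token')

    zone = dns.create_zone('example.com', 3600, 'pri_sec').body
    dns.create_host(zone['id'], 'A', '1.2.3.4', :hostname => 'www')
    dns.get_zone('example.com').body['domain']          # lookup by domain now works
    dns.list_hosts(zone['id']).body['hosts'].size       # => 1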
a/lib/fog/zerigo/requests/dns/list_zones.rb +++ b/lib/fog/zerigo/requests/dns/list_zones.rb @@ -40,6 +40,21 @@ def list_zones end end + + class Mock # :nodoc:all + + def list_zones + response = Excon::Response.new + + response.status = 200 + response.body = { + 'zones' => self.data[:zones] + } + + response + end + + end end end end diff --git a/lib/fog/zerigo/requests/dns/update_host.rb b/lib/fog/zerigo/requests/dns/update_host.rb index f190512dfc..acc42e497c 100644 --- a/lib/fog/zerigo/requests/dns/update_host.rb +++ b/lib/fog/zerigo/requests/dns/update_host.rb @@ -18,7 +18,7 @@ class Real # * response<~Excon::Response>: # * 'status'<~Integer> - 200 for success # - def update_host( host_id, options = {}) + def update_host(host_id, options = {}) optional_tags= '' options.each { |option, value| @@ -47,6 +47,25 @@ def update_host( host_id, options = {}) end end + + class Mock # :nodoc:all + def update_host(host_id, options = {}) + host = find_host(host_id) + + response = Excon::Response.new + + if host + options.each { |k, v| host[k.to_s] = v } # Deal with symbols in requests but strings in responses. + host['updated-at'] = Time.now + + response.status = 200 + else + response.status = 404 + end + + response + end + end end end end diff --git a/lib/fog/zerigo/requests/dns/update_zone.rb b/lib/fog/zerigo/requests/dns/update_zone.rb index daff9828d5..f6c1ba4fef 100644 --- a/lib/fog/zerigo/requests/dns/update_zone.rb +++ b/lib/fog/zerigo/requests/dns/update_zone.rb @@ -24,7 +24,7 @@ class Real # ==== Returns # * response<~Excon::Response>: # * 'status'<~Integer> - 200 for success - def update_zone( zone_id, options = {}) + def update_zone(zone_id, options = {}) optional_tags= '' options.each { |option, value| @@ -65,6 +65,25 @@ def update_zone( zone_id, options = {}) end end + + class Mock # :nodoc:all + def update_zone(zone_id, options = {}) + zone = find_by_zone_id(zone_id) + + response = Excon::Response.new + + if zone + options.each { |k, v| zone[k.to_s] = v } # Deal with symbols in requests but strings in responses. + zone['updated-at'] = Time.now + + response.status = 200 + else + response.status = 404 + end + + response + end + end end end end diff --git a/tests/aws/models/auto_scaling/activities_tests.rb b/tests/aws/models/auto_scaling/activities_tests.rb index 5df4880e33..449047d1d8 100644 --- a/tests/aws/models/auto_scaling/activities_tests.rb +++ b/tests/aws/models/auto_scaling/activities_tests.rb @@ -1,6 +1,6 @@ Shindo.tests('AWS::AutoScaling | activities', ['aws', 'auto_scaling_m']) do pending # FIXME: activity#save is not implemented - collection_tests(AWS[:auto_scaling].activities, {}, false) + collection_tests(Fog::AWS[:auto_scaling].activities, {}, false) end diff --git a/tests/aws/models/auto_scaling/configuration_test.rb b/tests/aws/models/auto_scaling/configuration_test.rb index 08605355e1..d0d77508ff 100644 --- a/tests/aws/models/auto_scaling/configuration_test.rb +++ b/tests/aws/models/auto_scaling/configuration_test.rb @@ -6,7 +6,7 @@ :instance_type => 't1.micro' } - model_tests(AWS[:auto_scaling].configurations, params, false) do + model_tests(Fog::AWS[:auto_scaling].configurations, params, false) do @instance.wait_for { ready? 
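The long run of test changes that starts here is mechanical: the bare AWS[:service] accessor is replaced by the namespaced Fog::AWS[:service] form, for example:

    # old form used throughout the tests
    AWS[:auto_scaling].describe_adjustment_types.body

    # namespaced form the tests now use
    Fog::AWS[:auto_scaling].describe_adjustment_types.body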
} end diff --git a/tests/aws/models/auto_scaling/configurations_tests.rb b/tests/aws/models/auto_scaling/configurations_tests.rb index 71066dd7d4..1dae9f3926 100644 --- a/tests/aws/models/auto_scaling/configurations_tests.rb +++ b/tests/aws/models/auto_scaling/configurations_tests.rb @@ -6,6 +6,6 @@ :instance_type => 't1.micro' } - collection_tests(AWS[:auto_scaling].configurations, params, false) + collection_tests(Fog::AWS[:auto_scaling].configurations, params, false) end diff --git a/tests/aws/models/auto_scaling/instances_tests.rb b/tests/aws/models/auto_scaling/instances_tests.rb index 29e15f2473..afaaa41b35 100644 --- a/tests/aws/models/auto_scaling/instances_tests.rb +++ b/tests/aws/models/auto_scaling/instances_tests.rb @@ -1,6 +1,6 @@ Shindo.tests('AWS::AutoScaling | instances', ['aws', 'auto_scaling_m']) do pending # FIXME: instance#save is not defined - #collection_tests(AWS[:auto_scaling].instances, {}, false) + #collection_tests(Fog::AWS[:auto_scaling].instances, {}, false) end diff --git a/tests/aws/models/cloud_watch/alarm_data_tests.rb b/tests/aws/models/cloud_watch/alarm_data_tests.rb new file mode 100644 index 0000000000..44da0080ff --- /dev/null +++ b/tests/aws/models/cloud_watch/alarm_data_tests.rb @@ -0,0 +1,40 @@ +Shindo.tests("AWS::CloudWatch | alarm_data", ['aws', 'cloudwatch']) do + + tests('success') do + tests("#all").succeeds do + Fog::AWS[:cloud_watch].alarm_data.all + end + + alarm_name_prefix = {'AlarmNamePrefix'=>'tmp'} + tests("#all_by_prefix").succeeds do + Fog::AWS[:cloud_watch].alarm_data.all(alarm_name_prefix) + end + + namespace = 'AWS/EC2' + metric_name = 'CPUUtilization' + + tests("#get").succeeds do + Fog::AWS[:cloud_watch].alarm_data.get(namespace, metric_name).to_json + end + + new_attributes = { + :alarm_name => 'tmp-alarm', + :comparison_operator => 'GreaterThanOrEqualToThreshold', + :evaluation_periods => 1, + :metric_name => 'tmp-metric-alarm', + :namespace => 'fog-0.11.0', + :period => 60, + :statistic => 'Sum', + :threshold => 5 + } + tests('#new').returns(new_attributes) do + Fog::AWS[:cloud_watch].alarm_data.new(new_attributes).attributes + end + + tests('#create').returns(new_attributes) do + Fog::AWS[:cloud_watch].alarm_data.create(new_attributes).attributes + end + + end + +end diff --git a/tests/aws/models/cloud_watch/alarm_history_tests.rb b/tests/aws/models/cloud_watch/alarm_history_tests.rb new file mode 100644 index 0000000000..068c6c1696 --- /dev/null +++ b/tests/aws/models/cloud_watch/alarm_history_tests.rb @@ -0,0 +1,20 @@ +Shindo.tests("AWS::CloudWatch | alarm_histories", ['aws', 'cloudwatch']) do + + tests('success') do + tests("#all").succeeds do + Fog::AWS[:cloud_watch].alarm_histories.all + end + + new_attributes = { + :alarm_name => 'tmp-alarm', + :end_date => '', + :history_item_type => 'StateUpdate', + :max_records => 1, + :start_date => '' + } + tests('#new').returns(new_attributes) do + Fog::AWS[:cloud_watch].alarm_histories.new(new_attributes).attributes + end + end + +end diff --git a/tests/aws/models/cloud_watch/metric_statistics_tests.rb b/tests/aws/models/cloud_watch/metric_statistics_tests.rb index ff3d7a32ed..48d3111ac9 100644 --- a/tests/aws/models/cloud_watch/metric_statistics_tests.rb +++ b/tests/aws/models/cloud_watch/metric_statistics_tests.rb @@ -12,7 +12,7 @@ statisticTypes = ['Minimum','Maximum','Sum','SampleCount','Average'] tests("#all").succeeds do - @statistics = AWS[:cloud_watch].metric_statistics.all({'Statistics' => statisticTypes, 'StartTime' => startTime, 'EndTime' => endTime, 'Period' => 
period, 'MetricName' => metricName, 'Namespace' => namespace, 'Dimensions' => [{'Name' => 'InstanceId', 'Value' => instanceId}]}) + @statistics = Fog::AWS[:cloud_watch].metric_statistics.all({'Statistics' => statisticTypes, 'StartTime' => startTime, 'EndTime' => endTime, 'Period' => period, 'MetricName' => metricName, 'Namespace' => namespace, 'Dimensions' => [{'Name' => 'InstanceId', 'Value' => instanceId}]}) end tests("#all_not_empty").returns(true) do @@ -26,11 +26,11 @@ :unit => 'None' } tests('#new').returns(new_attributes) do - AWS[:cloud_watch].metric_statistics.new(new_attributes).attributes + Fog::AWS[:cloud_watch].metric_statistics.new(new_attributes).attributes end tests('#create').returns(new_attributes) do - AWS[:cloud_watch].metric_statistics.create(new_attributes).attributes + Fog::AWS[:cloud_watch].metric_statistics.create(new_attributes).attributes end stats_set_attributes = { @@ -44,7 +44,7 @@ :unit => 'None' } tests('#create_stats_set').returns(stats_set_attributes) do - AWS[:cloud_watch].metric_statistics.create(stats_set_attributes).attributes + Fog::AWS[:cloud_watch].metric_statistics.create(stats_set_attributes).attributes end end diff --git a/tests/aws/models/cloud_watch/metrics_tests.rb b/tests/aws/models/cloud_watch/metrics_tests.rb index 12b379e56c..1285caf1e3 100644 --- a/tests/aws/models/cloud_watch/metrics_tests.rb +++ b/tests/aws/models/cloud_watch/metrics_tests.rb @@ -3,13 +3,13 @@ tests('success') do pending # FIXME: the hardcoded instance id won't be available tests("#all").succeeds do - AWS[:cloud_watch].metrics.all + Fog::AWS[:cloud_watch].metrics.all end instanceId = 'i-fd713391' metricName = 'CPUUtilization' namespace = 'AWS/EC2' tests("#get").returns({:dimensions=>[{"Name"=>"InstanceId", "Value"=>instanceId}], :name=>metricName, :namespace=>namespace}) do - AWS[:cloud_watch].metrics.get(namespace, metricName, {'InstanceId' => instanceId}).attributes + Fog::AWS[:cloud_watch].metrics.get(namespace, metricName, {'InstanceId' => instanceId}).attributes end end diff --git a/tests/aws/models/compute/security_group_tests.rb b/tests/aws/models/compute/security_group_tests.rb index 7d38dd89f8..33a86de0ef 100644 --- a/tests/aws/models/compute/security_group_tests.rb +++ b/tests/aws/models/compute/security_group_tests.rb @@ -2,17 +2,10 @@ model_tests(Fog::Compute[:aws].security_groups, {:description => 'foggroupdescription', :name => 'foggroupname'}, true) - tests("a group with trailing whitespace") do - @group = Fog::Compute[:aws].security_groups.create(:name => "foggroup with spaces ", :description => " fog group desc ") - test("name is correct") do - @group.name == "foggroup with spaces " - end - - test("description is correct") do - @group.description == " fog group desc " - end + tests("authorize and revoke helpers") do + @group = Fog::Compute[:aws].security_groups.create(:name => "foggroup", :description => "fog group desc") - @other_group = Fog::Compute[:aws].security_groups.create(:name => 'other group', :description => 'another group') + @other_group = Fog::Compute[:aws].security_groups.create(:name => 'fog other group', :description => 'another fog group') test("authorize access by another security group") do @group.authorize_group_and_owner(@other_group.name) @@ -26,6 +19,18 @@ @group.ip_permissions.empty? 
end + test("authorize access to a port range") do + @group.authorize_port_range(5000..6000) + @group.reload + @group.ip_permissions.size == 1 + end + + test("revoke access to a port range") do + @group.revoke_port_range(5000..6000) + @group.reload + @group.ip_permissions.empty? + end + @other_group.destroy @group.destroy end diff --git a/tests/aws/models/compute/server_tests.rb b/tests/aws/models/compute/server_tests.rb index 31a67781cc..505a07194d 100644 --- a/tests/aws/models/compute/server_tests.rb +++ b/tests/aws/models/compute/server_tests.rb @@ -47,6 +47,8 @@ tests('tags') do @instance = Fog::Compute[:aws].servers.create(:tags => {'key' => 'value'}) + @instance.wait_for { ready? } + tests('@instance.reload.tags').returns({'key' => 'value'}) do @instance.reload.tags end diff --git a/tests/aws/models/elasticache/cluster_tests.rb b/tests/aws/models/elasticache/cluster_tests.rb new file mode 100644 index 0000000000..e82ed23bc0 --- /dev/null +++ b/tests/aws/models/elasticache/cluster_tests.rb @@ -0,0 +1,30 @@ +Shindo.tests('AWS::Elasticache | cache clusters', ['aws', 'elasticache']) do + cluster_params = { + :id => "fog-test-cluster-#{rand(999).to_s}", + :node_type => 'cache.m1.large', + :security_groups => ['default'], + :engine => 'memcached', + :num_nodes => 1 + } + + pending if Fog.mocking? + + Formatador.display_line "Creating cluster #{cluster_params[:id]}..." + model_tests(AWS[:elasticache].clusters, cluster_params, false) do + @instance.reload # Reload to get the cluster info from AWS + Formatador.display_line "Waiting for #{@instance.id} "+ + "to become available (#{@instance.status})..." + @instance.wait_for {ready?} + end + + # Single model is still deleting, so re-randomize the cluster ID + cluster_params[:id] = "fog-test-cluster-#{rand(999).to_s}" + Formatador.display_line "Creating cluster #{cluster_params[:id]}..." + collection_tests(AWS[:elasticache].clusters, cluster_params, false) do + @instance.reload # Reload to get the cluster info from AWS + Formatador.display_line "Waiting for #{@instance.id} "+ + "to become available (#{@instance.status})..." + @instance.wait_for {ready?} + end + +end diff --git a/tests/aws/models/elasticache/parameter_groups_tests.rb b/tests/aws/models/elasticache/parameter_groups_tests.rb new file mode 100644 index 0000000000..56d7306c8e --- /dev/null +++ b/tests/aws/models/elasticache/parameter_groups_tests.rb @@ -0,0 +1,17 @@ +Shindo.tests('AWS::Elasticache | parameter groups', ['aws', 'elasticache']) do + group_name = 'fog-test' + description = 'Fog Test' + + pending if Fog.mocking? + + model_tests( + AWS[:elasticache].parameter_groups, + {:id => group_name, :description => description}, false + ) + + collection_tests( + AWS[:elasticache].parameter_groups, + {:id => group_name, :description => description}, false + ) + +end diff --git a/tests/aws/models/elasticache/security_groups_tests.rb b/tests/aws/models/elasticache/security_groups_tests.rb new file mode 100644 index 0000000000..c3d22d8d39 --- /dev/null +++ b/tests/aws/models/elasticache/security_groups_tests.rb @@ -0,0 +1,52 @@ +Shindo.tests('AWS::Elasticache | security groups', ['aws', 'elasticache']) do + group_name = 'fog-test' + description = 'Fog Test' + + pending if Fog.mocking? 
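The new security group assertions above lean on the authorize_port_range and revoke_port_range helpers, which take a Ruby Range. A condensed sketch of the same flow (group name and protocol defaults are assumptions, not part of the diff):

    group = Fog::Compute[:aws].security_groups.create(
      :name => 'fog-example', :description => 'example group')

    group.authorize_port_range(5000..6000)    # opens the given port range
    group.reload.ip_permissions.size          # => 1

    group.revoke_port_range(5000..6000)       # closes it again
    group.reload.ip_permissions               # => []
    group.destroy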
+ + model_tests( + AWS[:elasticache].security_groups, + {:id => group_name, :description => description}, false + ) do + + # An EC2 group to authorize + ec2_group = Fog::Compute.new(:provider => 'AWS').security_groups.create( + :name => 'fog-test-elasticache', :description => 'fog test' + ) + + # Reload to get the instance owner_id + @instance.reload + + tests('#authorize_ec2_group') do + @instance.authorize_ec2_group(ec2_group.name) + returns('authorizing') do + group = @instance.ec2_groups.detect do |g| + g['EC2SecurityGroupName'] == ec2_group.name + end + group['Status'] + end + returns(false, 'not ready') { @instance.ready? } + end + + @instance.wait_for { ready? } + + tests('#revoke_ec2_group') do + @instance.revoke_ec2_group(ec2_group.name) + returns('revoking') do + group = @instance.ec2_groups.detect do |g| + g['EC2SecurityGroupName'] == ec2_group.name + end + group['Status'] + end + returns(false, 'not ready') { @instance.ready? } + end + + ec2_group.destroy + end + + collection_tests( + AWS[:elasticache].security_groups, + {:id => group_name, :description => description}, false + ) + +end diff --git a/tests/aws/models/elb/model_tests.rb b/tests/aws/models/elb/model_tests.rb index 6512c7b73a..3966336bb7 100644 --- a/tests/aws/models/elb/model_tests.rb +++ b/tests/aws/models/elb/model_tests.rb @@ -5,13 +5,13 @@ tests('success') do tests('load_balancers') do tests('getting a missing elb') do - returns(nil) { AWS[:elb].load_balancers.get('no-such-elb') } + returns(nil) { Fog::AWS[:elb].load_balancers.get('no-such-elb') } end end tests('listeners') do tests("default attributes") do - listener = AWS[:elb].listeners.new + listener = Fog::AWS[:elb].listeners.new tests('instance_port is 80').returns(80) { listener.instance_port } tests('lb_port is 80').returns(80) { listener.lb_port } tests('protocol is HTTP').returns('HTTP') { listener.protocol } @@ -20,7 +20,7 @@ tests("specifying attributes") do attributes = {:instance_port => 2000, :lb_port => 2001, :protocol => 'SSL', :policy_names => ['fake'] } - listener = AWS[:elb].listeners.new(attributes) + listener = Fog::AWS[:elb].listeners.new(attributes) tests('instance_port is 2000').returns(2000) { listener.instance_port } tests('lb_port is 2001').returns(2001) { listener.lb_port } tests('protocol is SSL').returns('SSL') { listener.protocol } @@ -33,27 +33,27 @@ tests('create') do tests('without availability zones') do - elb = AWS[:elb].load_balancers.create(:id => elb_id) + elb = Fog::AWS[:elb].load_balancers.create(:id => elb_id, :availability_zones => @availability_zones) tests("availability zones are correct").returns(@availability_zones.sort) { elb.availability_zones.sort } tests("dns names is set").returns(true) { elb.dns_name.is_a?(String) } tests("created_at is set").returns(true) { Time === elb.created_at } tests("policies is empty").returns([]) { elb.policies } tests("default listener") do tests("1 listener").returns(1) { elb.listeners.size } - tests("params").returns(AWS[:elb].listeners.new.to_params) { elb.listeners.first.to_params } + tests("params").returns(Fog::AWS[:elb].listeners.new.to_params) { elb.listeners.first.to_params } end end tests('with availability zones') do azs = @availability_zones[1..-1] - elb2 = AWS[:elb].load_balancers.create(:id => "#{elb_id}-2", :availability_zones => azs) + elb2 = Fog::AWS[:elb].load_balancers.create(:id => "#{elb_id}-2", :availability_zones => azs) tests("availability zones are correct").returns(azs.sort) { elb2.availability_zones.sort } elb2.destroy end # Need to sleep here for IAM 
changes to propgate tests('with ListenerDescriptions') do - @certificate = AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] + @certificate = Fog::AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] sleep(8) unless Fog.mocking? listeners = [{ 'Listener' => { @@ -67,7 +67,7 @@ }, 'PolicyNames' => [] }] - elb3 = AWS[:elb].load_balancers.create(:id => "#{elb_id}-3", 'ListenerDescriptions' => listeners) + elb3 = Fog::AWS[:elb].load_balancers.create(:id => "#{elb_id}-3", 'ListenerDescriptions' => listeners, :availability_zones => @availability_zones) tests('there are 2 listeners').returns(2) { elb3.listeners.count } tests('instance_port is 2030').returns(2030) { elb3.listeners.first.instance_port } tests('lb_port is 2030').returns(2030) { elb3.listeners.first.lb_port } @@ -81,23 +81,23 @@ 'Listener' => { 'LoadBalancerPort' => 443, 'InstancePort' => 80, 'Protocol' => 'HTTPS', "SSLCertificateId" => "fakecert"} }] - AWS[:elb].load_balancers.create(:id => "#{elb_id}-4", "ListenerDescriptions" => listeners) + Fog::AWS[:elb].load_balancers.create(:id => "#{elb_id}-4", "ListenerDescriptions" => listeners, :availability_zones => @availability_zones) end end tests('all') do - elb_ids = AWS[:elb].load_balancers.all.map{|e| e.id} + elb_ids = Fog::AWS[:elb].load_balancers.all.map{|e| e.id} tests("contains elb").returns(true) { elb_ids.include? elb_id } end tests('get') do - elb_get = AWS[:elb].load_balancers.get(elb_id) + elb_get = Fog::AWS[:elb].load_balancers.get(elb_id) tests('ids match').returns(elb_id) { elb_get.id } end tests('creating a duplicate elb') do raises(Fog::AWS::ELB::IdentifierTaken) do - AWS[:elb].load_balancers.create(:id => elb_id, :availability_zones => ['us-east-1d']) + Fog::AWS[:elb].load_balancers.create(:id => elb_id, :availability_zones => ['us-east-1d']) end end @@ -110,6 +110,8 @@ end server = Fog::Compute[:aws].servers.create + server.wait_for { ready? } + tests('register instance') do begin elb.register_instances(server.id) @@ -246,6 +248,6 @@ elb.destroy end - AWS[:iam].delete_server_certificate(@key_name) + Fog::AWS[:iam].delete_server_certificate(@key_name) end end diff --git a/tests/aws/models/rds/parameter_group_tests.rb b/tests/aws/models/rds/parameter_group_tests.rb index 3646d80810..4ee10d9c0a 100644 --- a/tests/aws/models/rds/parameter_group_tests.rb +++ b/tests/aws/models/rds/parameter_group_tests.rb @@ -4,7 +4,7 @@ params = {:id => group_name, :family => 'mysql5.1', :description => group_name} pending if Fog.mocking? - model_tests(AWS[:rds].parameter_groups, params, false) do + model_tests(Fog::AWS[:rds].parameter_groups, params, false) do tests('#parameters') do #search for a sample parameter tests 'contains parameters' do diff --git a/tests/aws/models/rds/parameter_groups_tests.rb b/tests/aws/models/rds/parameter_groups_tests.rb index 096822bf94..786b2e78a8 100644 --- a/tests/aws/models/rds/parameter_groups_tests.rb +++ b/tests/aws/models/rds/parameter_groups_tests.rb @@ -4,5 +4,5 @@ params = {:id => group_name, :family => 'mysql5.1', :description => group_name} pending if Fog.mocking? 
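These test updates reflect that creating a load balancer through the model layer now passes availability zones explicitly rather than relying on an implicit default, along the lines of the sketch below (the id is illustrative):

    elb = Fog::AWS[:elb].load_balancers.create(
      :id                 => 'fog-example-elb',
      :availability_zones => ['us-east-1d'])

    elb.availability_zones   # => ["us-east-1d"]
    elb.destroy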
- collection_tests(AWS[:rds].parameter_groups, params, false) + collection_tests(Fog::AWS[:rds].parameter_groups, params, false) end diff --git a/tests/aws/models/rds/security_group_tests.rb b/tests/aws/models/rds/security_group_tests.rb index c86534597b..800e814474 100644 --- a/tests/aws/models/rds/security_group_tests.rb +++ b/tests/aws/models/rds/security_group_tests.rb @@ -3,7 +3,7 @@ params = {:id => group_name, :description => 'fog test'} pending if Fog.mocking? - model_tests(AWS[:rds].security_groups, params, false) do + model_tests(Fog::AWS[:rds].security_groups, params, false) do tests("#description").returns('fog test') { @instance.description } diff --git a/tests/aws/models/rds/security_groups_tests.rb b/tests/aws/models/rds/security_groups_tests.rb index 46560cf90a..9fdff32daa 100644 --- a/tests/aws/models/rds/security_groups_tests.rb +++ b/tests/aws/models/rds/security_groups_tests.rb @@ -2,5 +2,5 @@ params = {:id => 'fog-test', :description => 'fog test'} pending if Fog.mocking? - collection_tests(AWS[:rds].security_groups, params, false) + collection_tests(Fog::AWS[:rds].security_groups, params, false) end diff --git a/tests/aws/models/rds/server_tests.rb b/tests/aws/models/rds/server_tests.rb index 41bc2f485e..62d07d530a 100644 --- a/tests/aws/models/rds/server_tests.rb +++ b/tests/aws/models/rds/server_tests.rb @@ -2,9 +2,9 @@ pending if Fog.mocking? - model_tests(AWS[:rds].servers, rds_default_server_params, false) do + model_tests(Fog::AWS[:rds].servers, rds_default_server_params, false) do # We'll need this later; create it early to avoid waiting - @instance_with_final_snapshot = AWS[:rds].servers.create(rds_default_server_params.merge(:id => uniq_id("fog-snapshot-test"), :backup_retention_period => 1)) + @instance_with_final_snapshot = Fog::AWS[:rds].servers.create(rds_default_server_params.merge(:id => uniq_id("fog-snapshot-test"), :backup_retention_period => 1)) @instance.wait_for(20*60) { ready? } @@ -26,10 +26,10 @@ tests("#modify").succeeds do orig_parameter_group = @instance.db_parameter_groups.first['DBParameterGroupName'] - parameter_group = AWS[:rds].parameter_groups.create(:id => uniq_id, :family => 'mysql5.1', :description => 'fog-test') + parameter_group = Fog::AWS[:rds].parameter_groups.create(:id => uniq_id, :family => 'mysql5.1', :description => 'fog-test') orig_security_groups = @instance.db_security_groups.map{|h| h['DBSecurityGroupName']} - security_group = AWS[:rds].security_groups.create(:id => uniq_id, :description => 'fog-test') + security_group = Fog::AWS[:rds].security_groups.create(:id => uniq_id, :description => 'fog-test') modify_options = { 'DBParameterGroupName' => parameter_group.id, @@ -92,7 +92,7 @@ @instance_with_final_snapshot.wait_for { ready? } @instance_with_final_snapshot.destroy(final_snapshot_id) returns(true, "Final snapshot created") do - @final_snapshot = AWS[:rds].snapshots.get(final_snapshot_id) + @final_snapshot = Fog::AWS[:rds].snapshots.get(final_snapshot_id) !@final_snapshot.nil? end diff --git a/tests/aws/models/rds/servers_tests.rb b/tests/aws/models/rds/servers_tests.rb index 7c0679538c..fa03194dab 100644 --- a/tests/aws/models/rds/servers_tests.rb +++ b/tests/aws/models/rds/servers_tests.rb @@ -2,7 +2,7 @@ pending if Fog.mocking? - collection_tests(AWS[:rds].servers, rds_default_server_params, false) do + collection_tests(Fog::AWS[:rds].servers, rds_default_server_params, false) do @instance.wait_for { ready? 
} end end diff --git a/tests/aws/models/rds/snapshot_tests.rb b/tests/aws/models/rds/snapshot_tests.rb index c1e84c794c..09de659e2c 100644 --- a/tests/aws/models/rds/snapshot_tests.rb +++ b/tests/aws/models/rds/snapshot_tests.rb @@ -2,11 +2,11 @@ pending if Fog.mocking? - @server = AWS[:rds].servers.create(rds_default_server_params) + @server = Fog::AWS[:rds].servers.create(rds_default_server_params) @server.wait_for { ready? } params = {:id => uniq_id, :instance_id => @server.id} - model_tests(AWS[:rds].snapshots, params, false) do + model_tests(Fog::AWS[:rds].snapshots, params, false) do @instance.wait_for { ready? } end diff --git a/tests/aws/models/rds/snapshots_tests.rb b/tests/aws/models/rds/snapshots_tests.rb index 68d2ce007e..af6efbe3bf 100644 --- a/tests/aws/models/rds/snapshots_tests.rb +++ b/tests/aws/models/rds/snapshots_tests.rb @@ -2,11 +2,11 @@ pending if Fog.mocking? - @server = AWS[:rds].servers.create(rds_default_server_params) + @server = Fog::AWS[:rds].servers.create(rds_default_server_params) @server.wait_for { ready? } params = {:id => uniq_id, :instance_id => @server.id} - collection_tests(AWS[:rds].snapshots, params, false) do + collection_tests(Fog::AWS[:rds].snapshots, params, false) do @instance.wait_for { ready? } end diff --git a/tests/aws/requests/auto_scaling/auto_scaling_tests.rb b/tests/aws/requests/auto_scaling/auto_scaling_tests.rb index ffd5b77ce8..9eab046fa5 100644 --- a/tests/aws/requests/auto_scaling/auto_scaling_tests.rb +++ b/tests/aws/requests/auto_scaling/auto_scaling_tests.rb @@ -6,75 +6,75 @@ pending if Fog.mocking? tests("#describe_adjustment_types").formats(AWS::AutoScaling::Formats::DESCRIBE_ADJUSTMENT_TYPES) do - AWS[:auto_scaling].describe_adjustment_types.body + Fog::AWS[:auto_scaling].describe_adjustment_types.body end tests("#describe_metric_collection_types").formats(AWS::AutoScaling::Formats::DESCRIBE_METRIC_COLLECTION_TYPES) do - AWS[:auto_scaling].describe_metric_collection_types.body + Fog::AWS[:auto_scaling].describe_metric_collection_types.body end tests("#describe_scaling_process_types").formats(AWS::AutoScaling::Formats::DESCRIBE_SCALING_PROCESS_TYPES) do - AWS[:auto_scaling].describe_scaling_process_types.body + Fog::AWS[:auto_scaling].describe_scaling_process_types.body end tests("#create_launch_configuration").formats(AWS::AutoScaling::Formats::BASIC) do image_id = 'ami-8c1fece5' instance_type = 't1.micro' #listeners = [{'LoadBalancerPort' => 80, 'InstancePort' => 80, 'Protocol' => 'http'}] - AWS[:auto_scaling].create_launch_configuration(image_id, instance_type, @lc_name).body + Fog::AWS[:auto_scaling].create_launch_configuration(image_id, instance_type, @lc_name).body end tests("#describe_launch_configurations").formats(AWS::AutoScaling::Formats::DESCRIBE_LAUNCH_CONFIGURATIONS) do - AWS[:auto_scaling].describe_launch_configurations().body + Fog::AWS[:auto_scaling].describe_launch_configurations().body end tests("#describe_launch_configurations").formats(AWS::AutoScaling::Formats::DESCRIBE_LAUNCH_CONFIGURATIONS) do - AWS[:auto_scaling].describe_launch_configurations('LaunchConfigurationNames' => @lc_name).body + Fog::AWS[:auto_scaling].describe_launch_configurations('LaunchConfigurationNames' => @lc_name).body end tests("#describe_launch_configurations").formats(AWS::AutoScaling::Formats::DESCRIBE_LAUNCH_CONFIGURATIONS) do - AWS[:auto_scaling].describe_launch_configurations('LaunchConfigurationNames' => [@lc_name]).body + Fog::AWS[:auto_scaling].describe_launch_configurations('LaunchConfigurationNames' => [@lc_name]).body 
end tests("#create_auto_scaling_group").formats(AWS::AutoScaling::Formats::BASIC) do zones = ['us-east-1d'] max_size = 0 min_size = 0 - AWS[:auto_scaling].create_auto_scaling_group(@asg_name, zones, @lc_name, max_size, min_size).body + Fog::AWS[:auto_scaling].create_auto_scaling_group(@asg_name, zones, @lc_name, max_size, min_size).body end tests("#describe_auto_scaling_groups").formats(AWS::AutoScaling::Formats::DESCRIBE_AUTO_SCALING_GROUPS) do - AWS[:auto_scaling].describe_auto_scaling_groups().body + Fog::AWS[:auto_scaling].describe_auto_scaling_groups().body end tests("#describe_auto_scaling_groups").formats(AWS::AutoScaling::Formats::DESCRIBE_AUTO_SCALING_GROUPS) do - AWS[:auto_scaling].describe_auto_scaling_groups('AutoScalingGroupNames' => @asg_name).body + Fog::AWS[:auto_scaling].describe_auto_scaling_groups('AutoScalingGroupNames' => @asg_name).body end tests("#describe_auto_scaling_groups").formats(AWS::AutoScaling::Formats::DESCRIBE_AUTO_SCALING_GROUPS) do - AWS[:auto_scaling].describe_auto_scaling_groups('AutoScalingGroupNames' => [@asg_name]).body + Fog::AWS[:auto_scaling].describe_auto_scaling_groups('AutoScalingGroupNames' => [@asg_name]).body end tests("#describe_auto_scaling_instances").formats(AWS::AutoScaling::Formats::DESCRIBE_AUTO_SCALING_INSTANCES) do - AWS[:auto_scaling].describe_auto_scaling_instances().body + Fog::AWS[:auto_scaling].describe_auto_scaling_instances().body end tests("#describe_scaling_activities").formats(AWS::AutoScaling::Formats::DESCRIBE_SCALING_ACTIVITIES) do - AWS[:auto_scaling].describe_scaling_activities().body + Fog::AWS[:auto_scaling].describe_scaling_activities().body end tests("#describe_scaling_activities").formats(AWS::AutoScaling::Formats::DESCRIBE_SCALING_ACTIVITIES) do - AWS[:auto_scaling].describe_scaling_activities('ActivityIds' => '1').body + Fog::AWS[:auto_scaling].describe_scaling_activities('ActivityIds' => '1').body end tests("#describe_scaling_activities").formats(AWS::AutoScaling::Formats::DESCRIBE_SCALING_ACTIVITIES) do - AWS[:auto_scaling].describe_scaling_activities('ActivityIds' => ['1', '2']).body + Fog::AWS[:auto_scaling].describe_scaling_activities('ActivityIds' => ['1', '2']).body end tests("#describe_scaling_activities").formats(AWS::AutoScaling::Formats::DESCRIBE_SCALING_ACTIVITIES) do - AWS[:auto_scaling].describe_scaling_activities('AutoScalingGroupName' => @asg_name).body + Fog::AWS[:auto_scaling].describe_scaling_activities('AutoScalingGroupName' => @asg_name).body end tests("#set_desired_capacity").formats(AWS::AutoScaling::Formats::BASIC) do desired_capacity = 0 - AWS[:auto_scaling].set_desired_capacity(@asg_name, desired_capacity).body + Fog::AWS[:auto_scaling].set_desired_capacity(@asg_name, desired_capacity).body end tests("#delete_auto_scaling_group").formats(AWS::AutoScaling::Formats::BASIC) do - AWS[:auto_scaling].delete_auto_scaling_group(@asg_name).body + Fog::AWS[:auto_scaling].delete_auto_scaling_group(@asg_name).body end tests("#delete_launch_configuration").formats(AWS::AutoScaling::Formats::BASIC) do - AWS[:auto_scaling].delete_launch_configuration(@lc_name).body + Fog::AWS[:auto_scaling].delete_launch_configuration(@lc_name).body end end end diff --git a/tests/aws/requests/auto_scaling/model_tests.rb b/tests/aws/requests/auto_scaling/model_tests.rb index 1d8c731f00..ab813ecfe8 100644 --- a/tests/aws/requests/auto_scaling/model_tests.rb +++ b/tests/aws/requests/auto_scaling/model_tests.rb @@ -8,55 +8,55 @@ tests('configurations') do tests('getting a missing configuration') do - returns(nil) { 
AWS[:auto_scaling].configurations.get('fog-no-such-lc') } + returns(nil) { Fog::AWS[:auto_scaling].configurations.get('fog-no-such-lc') } end tests('create configuration') do - lc = AWS[:auto_scaling].configurations.create(:id => lc_id, :image_id => 'ami-8c1fece5', :instance_type => 't1.micro') + lc = Fog::AWS[:auto_scaling].configurations.create(:id => lc_id, :image_id => 'ami-8c1fece5', :instance_type => 't1.micro') #tests("dns names is set").returns(true) { lc.dns_name.is_a?(String) } tests("created_at is set").returns(true) { Time === lc.created_at } #tests("policies is empty").returns([]) { lc.policies } end tests('all configurations') do - lc_ids = AWS[:auto_scaling].configurations.all.map{|e| e.id} + lc_ids = Fog::AWS[:auto_scaling].configurations.all.map{|e| e.id} tests("contains lc").returns(true) { lc_ids.include? lc_id } end tests('get configuration') do - lc2 = AWS[:auto_scaling].configurations.get(lc_id) + lc2 = Fog::AWS[:auto_scaling].configurations.get(lc_id) tests('ids match').returns(lc_id) { lc2.id } end tests('creating a duplicate configuration') do raises(Fog::AWS::AutoScaling::IdentifierTaken) do - AWS[:auto_scaling].configurations.create(:id => lc_id, :image_id => 'ami-8c1fece5', :instance_type => 't1.micro') + Fog::AWS[:auto_scaling].configurations.create(:id => lc_id, :image_id => 'ami-8c1fece5', :instance_type => 't1.micro') end end end tests('groups') do tests('getting a missing group') do - returns(nil) { AWS[:auto_scaling].groups.get('fog-no-such-asg') } + returns(nil) { Fog::AWS[:auto_scaling].groups.get('fog-no-such-asg') } end asg = nil asg_id = 'fog-model-asg' tests('create') do - asg = AWS[:auto_scaling].groups.create(:id => asg_id, :availability_zones => ['us-east-1d'], :launch_configuration_name => lc_id) + asg = Fog::AWS[:auto_scaling].groups.create(:id => asg_id, :availability_zones => ['us-east-1d'], :launch_configuration_name => lc_id) #tests("dns names is set").returns(true) { asg.dns_name.is_a?(String) } tests("created_at is set").returns(true) { Time === asg.created_at } #tests("policies is empty").returns([]) { asg.policies } end tests('all') do - asg_ids = AWS[:auto_scaling].groups.all.map{|e| e.id} + asg_ids = Fog::AWS[:auto_scaling].groups.all.map{|e| e.id} tests("contains asg").returns(true) { asg_ids.include? 
asg_id } end tests('get') do - asg2 = AWS[:auto_scaling].groups.get(asg_id) + asg2 = Fog::AWS[:auto_scaling].groups.get(asg_id) tests('ids match').returns(asg_id) { asg2.id } end @@ -72,7 +72,7 @@ tests('creating a duplicate group') do raises(Fog::AWS::AutoScaling::IdentifierTaken) do - AWS[:auto_scaling].groups.create(:id => asg_id, :availability_zones => ['us-east-1d'], :launch_configuration_name => lc_id) + Fog::AWS[:auto_scaling].groups.create(:id => asg_id, :availability_zones => ['us-east-1d'], :launch_configuration_name => lc_id) end end @@ -97,7 +97,7 @@ end end - #server = AWS[:compute].servers.create + #server = Fog::AWS[:compute].servers.create #tests('register instance') do # begin # elb.register_instances(server.id) diff --git a/tests/aws/requests/cloud_formation/stack_tests.rb b/tests/aws/requests/cloud_formation/stack_tests.rb index bbdc97164a..8ced99e750 100644 --- a/tests/aws/requests/cloud_formation/stack_tests.rb +++ b/tests/aws/requests/cloud_formation/stack_tests.rb @@ -18,6 +18,11 @@ 'StackId' => String } + @update_stack_format = { + 'RequestId' => String, + 'StackId' => String + } + @get_template_format = { 'RequestId' => String, 'TemplateBody' => String @@ -91,12 +96,21 @@ tests("validate_template('TemplateURL' => '#{@template_url}')").formats(@validate_template_format) do pending if Fog.mocking? - AWS[:cloud_formation].validate_template('TemplateURL' => @template_url).body + Fog::AWS[:cloud_formation].validate_template('TemplateURL' => @template_url).body end tests("create_stack('#{@stack_name}', 'TemplateURL' => '#{@template_url}', Parameters => {'KeyName' => 'cloudformation'})").formats(@create_stack_format) do pending if Fog.mocking? - AWS[:cloud_formation].create_stack( + Fog::AWS[:cloud_formation].create_stack( + @stack_name, + 'TemplateURL' => @template_url, + 'Parameters' => {'KeyName' => 'cloudformation'} + ).body + end + + tests("update_stack('#{@stack_name}', 'TemplateURL' => '#{@template_url}', Parameters => {'KeyName' => 'cloudformation'})").formats(@update_stack_format) do + pending if Fog.mocking? + Fog::AWS[:cloud_formation].update_stack( @stack_name, 'TemplateURL' => @template_url, 'Parameters' => {'KeyName' => 'cloudformation'} @@ -105,29 +119,29 @@ tests("get_template('#{@stack_name})").formats(@get_template_format) do pending if Fog.mocking? - AWS[:cloud_formation].get_template(@stack_name).body + Fog::AWS[:cloud_formation].get_template(@stack_name).body end tests("describe_stacks").formats(@describe_stacks_format) do pending if Fog.mocking? - AWS[:cloud_formation].describe_stacks.body + Fog::AWS[:cloud_formation].describe_stacks.body end sleep(1) # avoid throttling tests("describe_stack_events('#{@stack_name}')").formats(@describe_stack_events_format) do pending if Fog.mocking? - AWS[:cloud_formation].describe_stack_events(@stack_name).body + Fog::AWS[:cloud_formation].describe_stack_events(@stack_name).body end tests("describe_stack_resources('StackName' => '#{@stack_name}')").formats(@describe_stack_resources_format) do pending if Fog.mocking? - AWS[:cloud_formation].describe_stack_resources('StackName' => @stack_name).body + Fog::AWS[:cloud_formation].describe_stack_resources('StackName' => @stack_name).body end tests("delete_stack('#{@stack_name}')").succeeds do pending if Fog.mocking? - AWS[:cloud_formation].delete_stack(@stack_name) + Fog::AWS[:cloud_formation].delete_stack(@stack_name) end unless Fog.mocking? 
diff --git a/tests/aws/requests/cloud_watch/get_metric_statistics_tests.rb b/tests/aws/requests/cloud_watch/get_metric_statistics_tests.rb index 16859b8e0a..64b396db45 100644 --- a/tests/aws/requests/cloud_watch/get_metric_statistics_tests.rb +++ b/tests/aws/requests/cloud_watch/get_metric_statistics_tests.rb @@ -22,7 +22,7 @@ tests("#get_metric_statistics").formats(@metrics_statistic_format) do pending if Fog.mocking? instanceId = 'i-420c352f' - AWS[:cloud_watch].get_metric_statistics({'Statistics' => ['Minimum','Maximum','Sum','SampleCount','Average'], 'StartTime' => (Time.now-600).iso8601, 'EndTime' => Time.now.iso8601, 'Period' => 60, 'MetricName' => 'DiskReadBytes', 'Namespace' => 'AWS/EC2', 'Dimensions' => [{'Name' => 'InstanceId', 'Value' => instanceId}]}).body + Fog::AWS[:cloud_watch].get_metric_statistics({'Statistics' => ['Minimum','Maximum','Sum','SampleCount','Average'], 'StartTime' => (Time.now-600).iso8601, 'EndTime' => Time.now.iso8601, 'Period' => 60, 'MetricName' => 'DiskReadBytes', 'Namespace' => 'AWS/EC2', 'Dimensions' => [{'Name' => 'InstanceId', 'Value' => instanceId}]}).body end end end diff --git a/tests/aws/requests/cloud_watch/list_metrics_test.rb b/tests/aws/requests/cloud_watch/list_metrics_test.rb index da8174a7d8..36584aecba 100644 --- a/tests/aws/requests/cloud_watch/list_metrics_test.rb +++ b/tests/aws/requests/cloud_watch/list_metrics_test.rb @@ -3,9 +3,9 @@ tests('success') do @metrics_list_format = { 'ListMetricsResult' => { - 'Metrics' => + 'Metrics' => [{ - 'Dimensions' => + 'Dimensions' => [{ 'Name' => String, 'Value' => String @@ -20,9 +20,9 @@ @instanceId = 'i-2f3eab59' @dimension_filtered_metrics_list_format = { 'ListMetricsResult' => { - 'Metrics' => + 'Metrics' => [{ - 'Dimensions' => + 'Dimensions' => [{ 'Name' => 'InstanceId', 'Value' => @instanceId @@ -37,18 +37,18 @@ tests("#list_metrics").formats(@metrics_list_format) do pending if Fog.mocking? - AWS[:cloud_watch].list_metrics.body + Fog::AWS[:cloud_watch].list_metrics.body end tests("#dimension_filtered_list_metrics").formats(@dimension_filtered_metrics_list_format) do pending if Fog.mocking? - AWS[:cloud_watch].list_metrics('Dimensions' => [{'Name' => 'InstanceId', 'Value' => @instanceId}]).body + Fog::AWS[:cloud_watch].list_metrics('Dimensions' => [{'Name' => 'InstanceId', 'Value' => @instanceId}]).body end tests("#metric_name_filtered_list_metrics").returns(true) do pending if Fog.mocking? metricName = "CPUUtilization" - AWS[:cloud_watch].list_metrics('MetricName' => metricName).body['ListMetricsResult']['Metrics'].all? do |metric| + Fog::AWS[:cloud_watch].list_metrics('MetricName' => metricName).body['ListMetricsResult']['Metrics'].all? do |metric| metric['MetricName'] == metricName end end @@ -56,9 +56,9 @@ tests("#namespace_filtered_list_metrics").returns(true) do pending if Fog.mocking? namespace = "AWS/EC2" - AWS[:cloud_watch].list_metrics('Namespace' => namespace).body['ListMetricsResult']['Metrics'].all? do |metric| + Fog::AWS[:cloud_watch].list_metrics('Namespace' => namespace).body['ListMetricsResult']['Metrics'].all? do |metric| metric['Namespace'] == namespace end end end -end \ No newline at end of file +end diff --git a/tests/aws/requests/cloud_watch/put_metric_data_tests.rb b/tests/aws/requests/cloud_watch/put_metric_data_tests.rb index 209c6af1e9..9165e98a74 100644 --- a/tests/aws/requests/cloud_watch/put_metric_data_tests.rb +++ b/tests/aws/requests/cloud_watch/put_metric_data_tests.rb @@ -6,18 +6,18 @@ tests('#puts_value').formats(@puts_format) do pending if Fog.mocking? 
- AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'Value' => 1}]).body + Fog::AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'Value' => 1}]).body end tests('#puts_statistics_set').succeeds do pending if Fog.mocking? - AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'StatisticValues' => {'Minimum' => 0, 'Maximum' => 9, 'Sum' => 45, 'SampleCount' => 10, 'Average' => 4.5}}]).body + Fog::AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'StatisticValues' => {'Minimum' => 0, 'Maximum' => 9, 'Sum' => 45, 'SampleCount' => 10, 'Average' => 4.5}}]).body end tests('#puts with dimensions').succeeds do pending if Fog.mocking? dimensions = [{}] - AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'Value' => 1, 'Dimensions' => dimensions}]).body + Fog::AWS[:cloud_watch].put_metric_data(namespace, [{'MetricName' => 'RequestTest', 'Unit' => 'None', 'Value' => 1, 'Dimensions' => dimensions}]).body end tests('#puts more than one').succeeds do @@ -29,7 +29,7 @@ end dp end - AWS[:cloud_watch].put_metric_data(namespace, datapoints).body + Fog::AWS[:cloud_watch].put_metric_data(namespace, datapoints).body end end diff --git a/tests/aws/requests/compute/image_tests.rb b/tests/aws/requests/compute/image_tests.rb index 4faeb8b091..9b3bcdb308 100644 --- a/tests/aws/requests/compute/image_tests.rb +++ b/tests/aws/requests/compute/image_tests.rb @@ -31,6 +31,11 @@ 'requestId' => String } + @modify_image_attribute_format = { + 'return' => Fog::Boolean, + 'requestId' => String + } + tests('success') do # the result for this is HUGE and relatively uninteresting... # tests("#describe_images").formats(@images_format) do @@ -39,6 +44,8 @@ @image_id = 'ami-1aad5273' if Fog.mocking? 
+ @other_account = Fog::Compute::AWS.new(:aws_access_key_id => 'other', :aws_secret_access_key => 'account') + tests("#register_image").formats(@register_image_format) do @image = Fog::Compute[:aws].register_image('image', 'image', '/dev/sda1').body end @@ -53,6 +60,26 @@ tests("#describe_images('state' => 'available')").formats(@describe_images_format) do Fog::Compute[:aws].describe_images('state' => 'available').body end + + tests("other_account#describe_images('image-id' => '#{@image_id}')").returns([]) do + @other_account.describe_images('image-id' => @image_id).body['imagesSet'] + end + + tests("#modify_image_attribute('#{@image_id}', 'Add.UserId' => ['#{@other_account.data[:owner_id]}'])").formats(@modify_image_attribute_format) do + Fog::Compute[:aws].modify_image_attribute(@image_id, { 'Add.UserId' => [@other_account.data[:owner_id]] }).body + end + + tests("other_account#describe_images('image-id' => '#{@image_id}')").returns([@image_id]) do + @other_account.describe_images('image-id' => @image_id).body['imagesSet'].map {|i| i['imageId'] } + end + + tests("#modify_image_attribute('#{@image_id}', 'Remove.UserId' => ['#{@other_account.data[:owner_id]}'])").formats(@modify_image_attribute_format) do + Fog::Compute[:aws].modify_image_attribute(@image_id, { 'Remove.UserId' => [@other_account.data[:owner_id]] }).body + end + + tests("other_account#describe_images('image-id' => '#{@image_id}')").returns([]) do + @other_account.describe_images('image-id' => @image_id).body['imagesSet'] + end end tests("#describe_images('image-id' => '#{@image_id}')").formats(@describe_images_format) do @@ -65,4 +92,16 @@ end end end + + tests('failure') do + tests("#modify_image_attribute(nil, { 'Add.Group' => ['all'] })").raises(ArgumentError) do + Fog::Compute[:aws].modify_image_attribute(nil, { 'Add.Group' => ['all'] }).body + end + + tests("#modify_image_attribute('ami-00000000', { 'Add.UserId' => ['123456789012'] })").raises(Fog::Compute::AWS::NotFound) do + pending unless Fog.mocking? + + Fog::Compute[:aws].modify_image_attribute('ami-00000000', { 'Add.UserId' => ['123456789012'] }).body + end + end end diff --git a/tests/aws/requests/compute/instance_tests.rb b/tests/aws/requests/compute/instance_tests.rb index cdfdd32e69..2b2b5acb3e 100644 --- a/tests/aws/requests/compute/instance_tests.rb +++ b/tests/aws/requests/compute/instance_tests.rb @@ -138,6 +138,21 @@ data end + if Fog.mocking? + # Ensure the new instance doesn't show up in mock describe_instances right away + tests("#describe_instances").formats(@describe_instances_format) do + body = Fog::Compute[:aws].describe_instances.body + instance_ids = body['reservationSet'].map {|reservation| reservation['instancesSet'].map {|instance| instance['instanceId'] } }.flatten + test("doesn't include the new instance") { !instance_ids.include?(@instance_id) } + body + end + + # But querying for the new instance directly should raise an error + tests("#describe_instances('instance-id' => '#{@instance_id}')").raises(Fog::Compute::AWS::NotFound) do + Fog::Compute[:aws].describe_instances('instance-id' => @instance_id) + end + end + server = Fog::Compute[:aws].servers.get(@instance_id) while server.nil? 
do # It may take a moment to get the server after launching it diff --git a/tests/aws/requests/compute/security_group_tests.rb b/tests/aws/requests/compute/security_group_tests.rb index 628f889e72..8e86cad862 100644 --- a/tests/aws/requests/compute/security_group_tests.rb +++ b/tests/aws/requests/compute/security_group_tests.rb @@ -25,25 +25,207 @@ Fog::Compute[:aws].create_security_group('fog_security_group', 'tests group').body end - tests("#authorize_security_group_ingress('fog_security_group', {'FromPort' => 80, 'IpProtocol' => 'tcp', 'toPort' => 80})").formats(AWS::Compute::Formats::BASIC) do - Fog::Compute[:aws].authorize_security_group_ingress( - 'fog_security_group', + tests("#create_security_group('fog_security_group_two', 'tests group')").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].create_security_group('fog_security_group_two', 'tests group').body + end + + to_be_revoked = [] + expected_permissions = [] + + permission = { 'SourceSecurityGroupName' => 'default' } + tests("#authorize_security_group_ingress('fog_security_group', #{permission.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permission).body + end + + to_be_revoked.push([permission, expected_permissions.dup]) + + expected_permissions = [ + {"groups"=>[{"groupName"=>"default", "userId"=>@owner_id}], + "fromPort"=>1, + "ipRanges"=>[], + "ipProtocol"=>"tcp", + "toPort"=>65535}, + {"groups"=>[{"groupName"=>"default", "userId"=>@owner_id}], + "fromPort"=>1, + "ipRanges"=>[], + "ipProtocol"=>"udp", + "toPort"=>65535}, + {"groups"=>[{"groupName"=>"default", "userId"=>@owner_id}], + "fromPort"=>-1, + "ipRanges"=>[], + "ipProtocol"=>"icmp", + "toPort"=>-1} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + permission = { 'SourceSecurityGroupName' => 'fog_security_group_two', 'SourceSecurityGroupOwnerId' => @owner_id } + tests("#authorize_security_group_ingress('fog_security_group', #{permission.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permission).body + end + + to_be_revoked.push([permission, expected_permissions.dup]) + + expected_permissions = [ + {"groups"=> + [{"userId"=>@owner_id, "groupName"=>"default"}, + {"userId"=>@owner_id, "groupName"=>"fog_security_group_two"}], + "ipRanges"=>[], + "ipProtocol"=>"tcp", + "fromPort"=>1, + "toPort"=>65535}, + {"groups"=> + [{"userId"=>@owner_id, "groupName"=>"default"}, + {"userId"=>@owner_id, "groupName"=>"fog_security_group_two"}], + "ipRanges"=>[], + "ipProtocol"=>"udp", + "fromPort"=>1, + "toPort"=>65535}, + {"groups"=> + [{"userId"=>@owner_id, "groupName"=>"default"}, + {"userId"=>@owner_id, "groupName"=>"fog_security_group_two"}], + "ipRanges"=>[], + "ipProtocol"=>"icmp", + "fromPort"=>-1, + "toPort"=>-1} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + permission = { 'IpProtocol' => 'tcp', 'FromPort' => '22', 'ToPort' => '22' } + tests("#authorize_security_group_ingress('fog_security_group', 
#{permission.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permission).body + end + + to_be_revoked.push([permission, expected_permissions.dup]) + + # previous did nothing + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + permission = { 'IpProtocol' => 'tcp', 'FromPort' => '22', 'ToPort' => '22', 'CidrIp' => '10.0.0.0/8' } + tests("#authorize_security_group_ingress('fog_security_group', #{permission.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permission).body + end + + to_be_revoked.push([permission, expected_permissions.dup]) + + expected_permissions += [ + {"groups"=>[], + "ipRanges"=>[{"cidrIp"=>"10.0.0.0/8"}], + "ipProtocol"=>"tcp", + "fromPort"=>22, + "toPort"=>22} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + # authorize with nested IpProtocol without IpRanges or Groups does nothing + permissions = { + 'IpPermissions' => [ + { 'IpProtocol' => 'tcp', 'FromPort' => '22', 'ToPort' => '22' } + ] + } + tests("#authorize_security_group_ingress('fog_security_group', #{permissions.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permissions).body + end + + to_be_revoked.push([permissions, expected_permissions.dup]) + + # previous did nothing + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + # authorize with nested IpProtocol with IpRanges + permissions = { + 'IpPermissions' => [ { - 'FromPort' => 80, - 'IpProtocol' => 'tcp', - 'ToPort' => 80, + 'IpProtocol' => 'tcp', 'FromPort' => '80', 'ToPort' => '80', + 'IpRanges' => [{ 'CidrIp' => '192.168.0.0/24' }] } - ).body + ] + } + tests("#authorize_security_group_ingress('fog_security_group', #{permissions.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permissions).body end - tests("#authorize_security_group_ingress('fog_security_group', {'SourceSecurityGroupName' => 'fog_security_group', 'SourceSecurityGroupOwnerId' => '#{@owner_id}'})").formats(AWS::Compute::Formats::BASIC) do - Fog::Compute[:aws].authorize_security_group_ingress( - 'fog_security_group', + to_be_revoked.push([permissions, expected_permissions.dup]) + + expected_permissions += [ + {"groups"=>[], + "ipRanges"=>[{"cidrIp"=>"192.168.0.0/24"}], + "ipProtocol"=>"tcp", + "fromPort"=>80, + "toPort"=>80} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + # authorize with nested IpProtocol with Groups + permissions = { + 'IpPermissions' => [ { - 'SourceSecurityGroupName' 
=> 'fog_security_group', - 'SourceSecurityGroupOwnerId' => @owner_id + 'IpProtocol' => 'tcp', 'FromPort' => '8000', 'ToPort' => '8000', + 'Groups' => [{ 'GroupName' => 'fog_security_group_two' }] + } + ] + } + tests("#authorize_security_group_ingress('fog_security_group', #{permissions.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permissions).body + end + + to_be_revoked.push([permissions, expected_permissions.dup]) + + expected_permissions += [ + {"groups"=>[{"userId"=>@owner_id, "groupName"=>"fog_security_group_two"}], + "ipRanges"=>[], + "ipProtocol"=>"tcp", + "fromPort"=>8000, + "toPort"=>8000} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end + + # authorize with nested IpProtocol with IpRanges and Groups + # try integers on this one instead of strings + permissions = { + 'IpPermissions' => [ + { + 'IpProtocol' => 'tcp', 'FromPort' => 9000, 'ToPort' => 9000, + 'IpRanges' => [{ 'CidrIp' => '172.16.0.0/24' }], + 'Groups' => [{ 'GroupName' => 'fog_security_group_two' }] } - ).body + ] + } + tests("#authorize_security_group_ingress('fog_security_group', #{permissions.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', permissions).body + end + + to_be_revoked.push([permissions, expected_permissions.dup]) + + expected_permissions += [ + {"groups"=> + [{"userId"=>@owner_id, "groupName"=>"fog_security_group_two"}], + "ipRanges"=>[{"cidrIp"=>"172.16.0.0/24"}], + "ipProtocol"=>"tcp", + "fromPort"=>9000, + "toPort"=>9000} + ] + + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions, Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) end tests("#describe_security_groups").formats(@security_groups_format) do @@ -54,36 +236,29 @@ Fog::Compute[:aws].describe_security_groups('group-name' => 'fog_security_group').body end - tests("#revoke_security_group_ingress('fog_security_group', {'FromPort' => 80, 'IpProtocol' => 'tcp', 'toPort' => 80})").formats(AWS::Compute::Formats::BASIC) do - Fog::Compute[:aws].revoke_security_group_ingress( - 'fog_security_group', - { - 'FromPort' => 80, - 'IpProtocol' => 'tcp', - 'ToPort' => 80, - } - ).body - end + to_be_revoked.reverse.each do |permission, expected_permissions_after| + tests("#revoke_security_group_ingress('fog_security_group', #{permission.inspect})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].revoke_security_group_ingress('fog_security_group', permission).body + end - tests("#revoke_security_group_ingress('fog_security_group', {'SourceSecurityGroupName' => 'fog_security_group', 'SourceSecurityGroupOwnerId' => '#{@owner_id}'})").formats(AWS::Compute::Formats::BASIC) do - Fog::Compute[:aws].revoke_security_group_ingress( - 'fog_security_group', - { - 'GroupName' => 'fog_security_group', - 'SourceSecurityGroupName' => 'fog_security_group', - 'SourceSecurityGroupOwnerId' => @owner_id - } - ).body + tests("#describe_security_groups('group-name' => 'fog_security_group')").returns([]) do + array_differences(expected_permissions_after, Fog::Compute[:aws].describe_security_groups('group-name' => 
'fog_security_group').body['securityGroupInfo'].first['ipPermissions']) + end end tests("#delete_security_group('fog_security_group')").formats(AWS::Compute::Formats::BASIC) do Fog::Compute[:aws].delete_security_group('fog_security_group').body end + tests("#delete_security_group('fog_security_group_two')").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].delete_security_group('fog_security_group_two').body + end + end tests('failure') do @security_group = Fog::Compute[:aws].security_groups.create(:description => 'tests group', :name => 'fog_security_group') + @other_security_group = Fog::Compute[:aws].security_groups.create(:description => 'tests group', :name => 'fog_other_security_group') tests("duplicate #create_security_group(#{@security_group.name}, #{@security_group.description})").raises(Fog::Compute::AWS::Error) do Fog::Compute[:aws].create_security_group(@security_group.name, @security_group.description) @@ -110,6 +285,46 @@ ) end + tests("#authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'IpProtocol' => 'tcp', 'FromPort' => 80, 'ToPort' => 80, 'IpRanges' => [{'CidrIp' => '10.0.0.0/8'}]}]})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'IpProtocol' => 'tcp', 'FromPort' => 80, 'ToPort' => 80, 'IpRanges' => [{'CidrIp' => '10.0.0.0/8'}]}]}).body + end + + tests("#authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'IpProtocol' => 'tcp', 'FromPort' => 80, 'ToPort' => 80, 'IpRanges' => [{'CidrIp' => '10.0.0.0/8'}]}]})").raises(Fog::Compute::AWS::Error) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'IpProtocol' => 'tcp', 'FromPort' => 80, 'ToPort' => 80, 'IpRanges' => [{'CidrIp' => '10.0.0.0/8'}]}]}) + end + + tests("#authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'Groups' => [{'GroupName' => '#{@other_security_group.name}'}], 'FromPort' => 80, 'ToPort' => 80, 'IpProtocol' => 'tcp'}]})").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', {'IpPermissions' => [{'Groups' => [{'GroupName' => @other_security_group.name}], 'FromPort' => 80, 'ToPort' => 80, 'IpProtocol' => 'tcp'}]}).body + end + + tests("#delete_security_group('#{@other_security_group.name}')").raises(Fog::Compute::AWS::Error) do + Fog::Compute[:aws].delete_security_group(@other_security_group.name) + end + + broken_params = [ + {}, + { "IpProtocol" => "what" }, + { "IpProtocol" => "tcp" }, + { "IpProtocol" => "what", "FromPort" => 1, "ToPort" => 1 }, + ] + broken_params += broken_params.map do |broken_params_item| + { "IpPermissions" => [broken_params_item] } + end + broken_params += [ + { "IpPermissions" => [] }, + { "IpPermissions" => nil } + ] + + broken_params.each do |broken_params_item| + tests("#authorize_security_group_ingress('fog_security_group', #{broken_params_item.inspect})").raises(Fog::Compute::AWS::Error) do + Fog::Compute[:aws].authorize_security_group_ingress('fog_security_group', broken_params_item) + end + + tests("#revoke_security_group_ingress('fog_security_group', #{broken_params_item.inspect})").raises(Fog::Compute::AWS::Error) do + Fog::Compute[:aws].revoke_security_group_ingress('fog_security_group', broken_params_item) + end + end + tests("#revoke_security_group_ingress('not_a_group_name', {'FromPort' => 80, 'IpProtocol' => 'tcp', 'toPort' => 80})").raises(Fog::Compute::AWS::NotFound) do 
Fog::Compute[:aws].revoke_security_group_ingress( 'not_a_group_name', @@ -136,7 +351,11 @@ end @security_group.destroy + @other_security_group.destroy + tests("#delete_security_group('default')").raises(Fog::Compute::AWS::Error) do + Fog::Compute[:aws].delete_security_group('default') + end end end diff --git a/tests/aws/requests/compute/tag_tests.rb b/tests/aws/requests/compute/tag_tests.rb index c853579c42..f2d69ac08e 100644 --- a/tests/aws/requests/compute/tag_tests.rb +++ b/tests/aws/requests/compute/tag_tests.rb @@ -13,14 +13,56 @@ @volume.wait_for { ready? } tests('success') do + if Fog.mocking? + @other_account = Fog::Compute::AWS.new(:aws_access_key_id => 'other', :aws_secret_access_key => 'account') + @image_id = Fog::Compute[:aws].register_image('image', 'image', '/dev/sda1').body['imageId'] + end + tests("#create_tags('#{@volume.identity}', 'foo' => 'bar')").formats(AWS::Compute::Formats::BASIC) do Fog::Compute[:aws].create_tags(@volume.identity, 'foo' => 'bar').body end + if Fog.mocking? + tests("#create_tags('#{@image_id}', 'foo' => 'baz')").formats(AWS::Compute::Formats::BASIC) do + Fog::Compute[:aws].create_tags(@image_id, 'foo' => 'baz').body + end + end + tests('#describe_tags').formats(@tags_format) do Fog::Compute[:aws].describe_tags.body end + expected_identities = Fog.mocking? ? [@volume.identity, @image_id] : [@volume.identity] + tests('#describe_tags').succeeds do + (expected_identities - Fog::Compute[:aws].describe_tags.body['tagSet'].map {|t| t['resourceId'] }).empty? + end + + tests("#describe_tags('key' => 'foo', 'value' => 'bar')").returns([@volume.identity]) do + Fog::Compute[:aws].describe_tags('key' => 'foo', 'value' => 'bar').body['tagSet'].map {|t| t['resourceId'] } + end + + if Fog.mocking? + tests("#describe_tags('key' => 'foo', 'value' => 'baz')").returns([@image_id]) do + Fog::Compute[:aws].describe_tags('key' => 'foo', 'value' => 'baz').body['tagSet'].map {|t| t['resourceId'] } + end + + Fog::Compute[:aws].modify_image_attribute(@image_id, 'Add.UserId' => [@other_account.data[:owner_id]]) + + tests("other_account#describe_tags('key' => 'foo', 'value' => 'baz')").returns([]) do + @other_account.describe_tags('key' => 'foo', 'value' => 'baz').body['tagSet'].map {|t| t['resourceId'] } + end + + tests("other_account#create_tags('#{@image_id}', 'foo' => 'quux')").formats(AWS::Compute::Formats::BASIC) do + @other_account.create_tags(@image_id, 'foo' => 'quux').body + end + + tests("other_account#describe_tags('key' => 'foo', 'value' => 'quux')").returns([@image_id]) do + @other_account.describe_tags('key' => 'foo', 'value' => 'quux').body['tagSet'].map {|t| t['resourceId'] } + end + end + + @volume.destroy + tests("#delete_tags('#{@volume.identity}', 'foo' => 'bar')").formats(AWS::Compute::Formats::BASIC) do Fog::Compute[:aws].delete_tags(@volume.identity, 'foo' => 'bar').body end @@ -32,6 +74,4 @@ end end - - @volume.destroy end diff --git a/tests/aws/requests/elasticache/cache_cluster_tests.rb b/tests/aws/requests/elasticache/cache_cluster_tests.rb new file mode 100644 index 0000000000..227e96f13f --- /dev/null +++ b/tests/aws/requests/elasticache/cache_cluster_tests.rb @@ -0,0 +1,137 @@ +Shindo.tests('AWS::Elasticache | cache cluster requests', ['aws', 'elasticache']) do + + tests('success') do + pending if Fog.mocking? + + # Randomize the cluster ID so tests can be fequently re-run + CLUSTER_ID = "fog-test-cluster-#{rand(999).to_s}" # 20 chars max! + NUM_NODES = 2 # Must be > 1, because one of the tests reomves a node! 
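      # Illustrative overview only (not exercised by the tests): the requests below walk a
      # full cache cluster lifecycle, waiting on the cluster model between steps. Roughly,
      # assuming live credentials:
      #
      #   conn = AWS[:elasticache]   # test-helper accessor used in this file
      #   conn.create_cache_cluster(CLUSTER_ID, :num_nodes => NUM_NODES)
      #   conn.clusters.get(CLUSTER_ID).wait_for { ready? }
      #   conn.modify_cache_cluster(CLUSTER_ID, :auto_minor_version_upgrade => false)
      #   conn.delete_cache_cluster(CLUSTER_ID)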
+ + tests( + '#create_cache_cluster' + ).formats(AWS::Elasticache::Formats::SINGLE_CACHE_CLUSTER) do + body = AWS[:elasticache].create_cache_cluster(CLUSTER_ID, + :num_nodes => NUM_NODES + ).body + cluster = body['CacheCluster'] + returns(CLUSTER_ID) { cluster['CacheClusterId'] } + returns('creating') { cluster['CacheClusterStatus'] } + body + end + + tests( + '#describe_cache_clusters without options' + ).formats(AWS::Elasticache::Formats::DESCRIBE_CACHE_CLUSTERS) do + body = AWS[:elasticache].describe_cache_clusters.body + returns(true, "has #{CLUSTER_ID}") do + body['CacheClusters'].any? do |cluster| + cluster['CacheClusterId'] == CLUSTER_ID + end + end + # The DESCRIBE_CACHE_CLUSTERS format must include only one cluster + # So remove all but the relevant cluster from the response body + test_cluster = body['CacheClusters'].delete_if do |cluster| + cluster['CacheClusterId'] != CLUSTER_ID + end + body + end + + tests( + '#describe_cache_clusters with cluster ID' + ).formats(AWS::Elasticache::Formats::DESCRIBE_CACHE_CLUSTERS) do + body = AWS[:elasticache].describe_cache_clusters(CLUSTER_ID).body + returns(1, "size of 1") { body['CacheClusters'].size } + returns(CLUSTER_ID, "has #{CLUSTER_ID}") do + body['CacheClusters'].first['CacheClusterId'] + end + body + end + + Formatador.display_line "Waiting for cluster #{CLUSTER_ID}..." + AWS[:elasticache].clusters.get(CLUSTER_ID).wait_for {ready?} + + tests( + '#describe_cache_clusters with node info' + ).formats(AWS::Elasticache::Formats::CACHE_CLUSTER_RUNNING) do + cluster = AWS[:elasticache].describe_cache_clusters(CLUSTER_ID, + :show_node_info => true + ).body['CacheClusters'].first + returns(NUM_NODES, "has #{NUM_NODES} nodes") do + cluster['CacheNodes'].count + end + cluster + end + + tests( + '#modify_cache_cluster - change a non-pending cluster attribute' + ).formats(AWS::Elasticache::Formats::CACHE_CLUSTER_RUNNING) do + body = AWS[:elasticache].modify_cache_cluster(CLUSTER_ID, + :auto_minor_version_upgrade => false + ).body + # now check that parameter change is in place + returns('false') { body['CacheCluster']['AutoMinorVersionUpgrade'] } + body['CacheCluster'] + end + + tests( + '#reboot_cache_cluster - reboot a node' + ).formats(AWS::Elasticache::Formats::CACHE_CLUSTER_RUNNING) do + c = AWS[:elasticache].clusters.get(CLUSTER_ID) + node_id = c.nodes.last['CacheNodeId'] + Formatador.display_line "Rebooting node #{node_id}..." + body = AWS[:elasticache].reboot_cache_cluster(c.id, [ node_id ]).body + returns('rebooting cache cluster nodes') do + body['CacheCluster']['CacheClusterStatus'] + end + body['CacheCluster'] + end + + Formatador.display_line "Waiting for cluster #{CLUSTER_ID}..." + AWS[:elasticache].clusters.get(CLUSTER_ID).wait_for {ready?} + + tests( + '#modify_cache_cluster - remove a node' + ).formats(AWS::Elasticache::Formats::CACHE_CLUSTER_RUNNING) do + c = AWS[:elasticache].clusters.get(CLUSTER_ID) + node_id = c.nodes.last['CacheNodeId'] + Formatador.display_line "Removing node #{node_id}..." + body = AWS[:elasticache].modify_cache_cluster(c.id, + { + :num_nodes => NUM_NODES - 1, + :nodes_to_remove => node_id, + :apply_immediately => true, + }).body + c.reload + returns(NUM_NODES - 1) { c.pending_values['NumCacheNodes'] } + body['CacheCluster'] + end + + Formatador.display_line "Waiting for cluster #{CLUSTER_ID}..." 
+ AWS[:elasticache].clusters.get(CLUSTER_ID).wait_for {ready?} + + tests( + '#delete_cache_clusters' + ).formats(AWS::Elasticache::Formats::CACHE_CLUSTER_RUNNING) do + body = AWS[:elasticache].delete_cache_cluster(CLUSTER_ID).body + # make sure this particular cluster is in the returned list + returns(true, "has #{CLUSTER_ID}") do + body['CacheClusters'].any? do |cluster| + cluster['CacheClusterId'] == CLUSTER_ID + end + end + # now check that it reports itself as 'deleting' + cluster = body['CacheClusters'].find do |cluster| + cluster['CacheClusterId'] == CLUSTER_ID + end + returns('deleting') { cluster['CacheClusterStatus'] } + cluster + end + end + + tests('failure') do + # TODO: + # Create a duplicate cluster ID + # List a missing cache cluster + # Delete a missing cache cluster + end +end diff --git a/tests/aws/requests/elasticache/describe_events.rb b/tests/aws/requests/elasticache/describe_events.rb new file mode 100644 index 0000000000..eaa6a32aa2 --- /dev/null +++ b/tests/aws/requests/elasticache/describe_events.rb @@ -0,0 +1,17 @@ +Shindo.tests('AWS::Elasticache | describe cache cluster events', + ['aws', 'elasticache']) do + + tests('success') do + pending if Fog.mocking? + + tests( + '#describe_events' + ).formats(AWS::Elasticache::Formats::EVENT_LIST) do + AWS[:elasticache].describe_events().body['Events'] + end + end + + tests('failure') do + # TODO: + end +end diff --git a/tests/aws/requests/elasticache/helper.rb b/tests/aws/requests/elasticache/helper.rb new file mode 100644 index 0000000000..65c42178dd --- /dev/null +++ b/tests/aws/requests/elasticache/helper.rb @@ -0,0 +1,68 @@ +class AWS + module Elasticache + module Formats + + BASIC = { + 'ResponseMetadata' => {'RequestId' => String} + } + + # Cache Security Groups + SECURITY_GROUP = { + 'EC2SecurityGroups' => Array, + 'CacheSecurityGroupName' => String, + 'Description' => String, + 'OwnerId' => String, + } + SINGLE_SECURITY_GROUP = BASIC.merge('CacheSecurityGroup' => SECURITY_GROUP) + DESCRIBE_SECURITY_GROUPS = {'CacheSecurityGroups' => [SECURITY_GROUP]} + + # Cache Parameter Groups + PARAMETER_GROUP = { + 'CacheParameterGroupFamily' => String, + 'CacheParameterGroupName' => String, + 'Description' => String, + } + SINGLE_PARAMETER_GROUP = BASIC.merge('CacheParameterGroup' => PARAMETER_GROUP) + DESCRIBE_PARAMETER_GROUPS = BASIC.merge('CacheParameterGroups' => [PARAMETER_GROUP]) + MODIFY_PARAMETER_GROUP = {'CacheParameterGroupName' => String } + PARAMETER_SET = { + 'Parameters' => Array, + 'CacheNodeTypeSpecificParameters' => Array, + } + ENGINE_DEFAULTS = PARAMETER_SET.merge('CacheParameterGroupFamily' => String) + # Cache Clusters - more parameters get added as the lifecycle progresses + CACHE_CLUSTER = { + 'AutoMinorVersionUpgrade' => String, # actually TrueClass or FalseClass + 'CacheSecurityGroups' => Array, + 'CacheClusterId' => String, + 'CacheClusterStatus' => String, + 'CacheNodeType' => String, + 'Engine' => String, + 'EngineVersion' => String, + 'CacheParameterGroup' => Hash, + 'NumCacheNodes' => Integer, + 'PreferredMaintenanceWindow' => String, + 'CacheNodes' => Array, + 'PendingModifiedValues' => Hash, + } + CACHE_CLUSTER_RUNNING = CACHE_CLUSTER.merge({ + 'CacheClusterCreateTime' => DateTime, + 'PreferredAvailabilityZone' => String, + }) + CACHE_CLUSTER_MODIFIED = CACHE_CLUSTER_RUNNING.merge({ + 'NotificationConfiguration' => Hash, + 'PendingModifiedValues' => Hash, + }) + SINGLE_CACHE_CLUSTER = BASIC.merge('CacheCluster' => CACHE_CLUSTER) + DESCRIBE_CACHE_CLUSTERS = BASIC.merge('CacheClusters' => 
[CACHE_CLUSTER]) + + EVENT = { + 'Date' => DateTime, + 'Message' => String, + 'SourceIdentifier' => String, + 'SourceType' => String, + } + EVENT_LIST = [EVENT] + end + end +end diff --git a/tests/aws/requests/elasticache/parameter_group_tests.rb b/tests/aws/requests/elasticache/parameter_group_tests.rb new file mode 100644 index 0000000000..210ce18844 --- /dev/null +++ b/tests/aws/requests/elasticache/parameter_group_tests.rb @@ -0,0 +1,105 @@ +Shindo.tests('AWS::Elasticache | parameter group requests', ['aws', 'elasticache']) do + + tests('success') do + pending if Fog.mocking? + + name = 'fog-test' + description = 'Fog Test Parameter Group' + + tests( + '#describe_engine_default_parameters' + ).formats(AWS::Elasticache::Formats::ENGINE_DEFAULTS) do + response = AWS[:elasticache].describe_engine_default_parameters + engine_defaults = response.body['EngineDefaults'] + returns('memcached1.4') { engine_defaults['CacheParameterGroupFamily'] } + engine_defaults + end + + tests( + '#create_cache_parameter_group' + ).formats(AWS::Elasticache::Formats::SINGLE_PARAMETER_GROUP) do + body = AWS[:elasticache].create_cache_parameter_group(name, description).body + group = body['CacheParameterGroup'] + returns(name) { group['CacheParameterGroupName'] } + returns(description) { group['Description'] } + returns('memcached1.4') { group['CacheParameterGroupFamily'] } + body + end + + tests( + '#describe_cache_parameters' + ).formats(AWS::Elasticache::Formats::PARAMETER_SET) do + response = AWS[:elasticache].describe_cache_parameters(name) + parameter_set = response.body['DescribeCacheParametersResult'] + parameter_set + end + + tests( + '#describe_cache_parameter_groups without options' + ).formats(AWS::Elasticache::Formats::DESCRIBE_PARAMETER_GROUPS) do + body = AWS[:elasticache].describe_cache_parameter_groups.body + returns(true, "has #{name}") do + body['CacheParameterGroups'].any? 
do |group| + group['CacheParameterGroupName'] == name + end + end + body + end + + tests( + '#reset_cache_parameter_group completely' + ).formats('CacheParameterGroupName' => String) do + result = AWS[:elasticache].reset_cache_parameter_group( + name + ).body['ResetCacheParameterGroupResult'] + returns(name) {result['CacheParameterGroupName']} + result + end + + tests( + '#modify_cache_parameter_group' + ).formats('CacheParameterGroupName' => String) do + result = AWS[:elasticache].modify_cache_parameter_group( + name, {"chunk_size" => 32} + ).body['ModifyCacheParameterGroupResult'] + returns(name) {result['CacheParameterGroupName']} + result + end + + # BUG: returns "MalformedInput - Unexpected complex element termination" + tests( + '#reset_cache_parameter_group with one parameter' + ).formats('CacheParameterGroupName' => String) do + pending + result = AWS[:elasticache].reset_cache_parameter_group( + name, ["chunk_size"] + ).body['ResetCacheParameterGroupResult'] + returns(name) {result['CacheParameterGroupName']} + result + end + + tests( + '#describe_cache_parameter_groups with name' + ).formats(AWS::Elasticache::Formats::DESCRIBE_PARAMETER_GROUPS) do + body = AWS[:elasticache].describe_cache_parameter_groups(name).body + returns(1, "size of 1") { body['CacheParameterGroups'].size } + returns(name, "has #{name}") do + body['CacheParameterGroups'].first['CacheParameterGroupName'] + end + body + end + + tests( + '#delete_cache_parameter_group' + ).formats(AWS::Elasticache::Formats::BASIC) do + body = AWS[:elasticache].delete_cache_parameter_group(name).body + end + end + + tests('failure') do + # TODO: + # Create a duplicate parameter group + # List a missing parameter group + # Delete a missing parameter group + end +end diff --git a/tests/aws/requests/elasticache/security_group_tests.rb b/tests/aws/requests/elasticache/security_group_tests.rb new file mode 100644 index 0000000000..774e5b1da8 --- /dev/null +++ b/tests/aws/requests/elasticache/security_group_tests.rb @@ -0,0 +1,107 @@ +Shindo.tests('AWS::Elasticache | security group requests', ['aws', 'elasticache']) do + + tests('success') do + pending if Fog.mocking? + + name = 'fog-test' + description = 'Fog Test Group' + + tests( + '#create_cache_security_group' + ).formats(AWS::Elasticache::Formats::SINGLE_SECURITY_GROUP) do + body = AWS[:elasticache].create_cache_security_group(name, description).body + group = body['CacheSecurityGroup'] + returns(name) { group['CacheSecurityGroupName'] } + returns(description) { group['Description'] } + returns([], "no authorized security group") { group['EC2SecurityGroups'] } + body + end + + tests( + '#describe_cache_security_groups without options' + ).formats(AWS::Elasticache::Formats::DESCRIBE_SECURITY_GROUPS) do + body = AWS[:elasticache].describe_cache_security_groups.body + returns(true, "has #{name}") do + body['CacheSecurityGroups'].any? 
do |group| + group['CacheSecurityGroupName'] == name + end + end + body + end + + tests( + '#describe_cache_security_groups with name' + ).formats(AWS::Elasticache::Formats::DESCRIBE_SECURITY_GROUPS) do + body = AWS[:elasticache].describe_cache_security_groups(name).body + returns(1, "size of 1") { body['CacheSecurityGroups'].size } + returns(name, "has #{name}") do + body['CacheSecurityGroups'].first['CacheSecurityGroupName'] + end + body + end + + tests('authorization') do + ec2_group = Fog::Compute.new(:provider => 'AWS').security_groups.create( + :name => 'fog-test-elasticache', :description => 'Fog Test Elasticache' + ) + # Reload to get the owner_id + ec2_group.reload + + tests( + '#authorize_cache_security_group_ingress' + ).formats(AWS::Elasticache::Formats::SINGLE_SECURITY_GROUP) do + body = AWS[:elasticache].authorize_cache_security_group_ingress( + name, ec2_group.name, ec2_group.owner_id + ).body + group = body['CacheSecurityGroup'] + expected_ec2_groups = [{ + 'Status' => 'authorizing', 'EC2SecurityGroupName' => ec2_group.name, + 'EC2SecurityGroupOwnerId' => ec2_group.owner_id + }] + returns(expected_ec2_groups, 'has correct EC2 groups') do + group['EC2SecurityGroups'] + end + body + end + + # Wait for the state to be active + Fog.wait_for do + response = AWS[:elasticache].describe_cache_security_groups(name) + group = response.body['CacheSecurityGroups'].first + group['EC2SecurityGroups'].all? {|ec2| ec2['Status'] == 'authorized'} + end + + tests( + '#revoke_cache_security_group_ingress' + ).formats(AWS::Elasticache::Formats::SINGLE_SECURITY_GROUP) do + body = AWS[:elasticache].revoke_cache_security_group_ingress( + name, ec2_group.name, ec2_group.owner_id + ).body + group = body['CacheSecurityGroup'] + expected_ec2_groups = [{ + 'Status' => 'revoking', 'EC2SecurityGroupName' => ec2_group.name, + 'EC2SecurityGroupOwnerId' => ec2_group.owner_id + }] + returns(expected_ec2_groups, 'has correct EC2 groups') do + group['EC2SecurityGroups'] + end + body + end + + ec2_group.destroy + end + + tests( + '#delete_cache_security_group' + ).formats(AWS::Elasticache::Formats::BASIC) do + body = AWS[:elasticache].delete_cache_security_group(name).body + end + end + + tests('failure') do + # TODO: + # Create a duplicate security group + # List a missing security group + # Delete a missing security group + end +end diff --git a/tests/aws/requests/elb/listener_tests.rb b/tests/aws/requests/elb/listener_tests.rb index 0208149c64..d89f8d7113 100644 --- a/tests/aws/requests/elb/listener_tests.rb +++ b/tests/aws/requests/elb/listener_tests.rb @@ -3,28 +3,28 @@ @key_name = 'fog-test' tests('success') do - AWS[:elb].create_load_balancer(['us-east-1a'], @load_balancer_id, [{'LoadBalancerPort' => 80, 'InstancePort' => 80, 'Protocol' => 'HTTP'}]) - @certificate = AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] + Fog::AWS[:elb].create_load_balancer(['us-east-1a'], @load_balancer_id, [{'LoadBalancerPort' => 80, 'InstancePort' => 80, 'Protocol' => 'HTTP'}]) + @certificate = Fog::AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] tests("#create_load_balancer_listeners").formats(AWS::ELB::Formats::BASIC) do listeners = [ {'Protocol' => 'TCP', 'LoadBalancerPort' => 443, 'InstancePort' => 443, 'SSLCertificateId' => @certificate['Arn']}, {'Protocol' => 'HTTP', 'LoadBalancerPort' => 80, 'InstancePort' => 80} ] - response = 
AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body + response = Fog::AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body response end tests("#delete_load_balancer_listeners").formats(AWS::ELB::Formats::BASIC) do ports = [80, 443] - AWS[:elb].delete_load_balancer_listeners(@load_balancer_id, ports).body + Fog::AWS[:elb].delete_load_balancer_listeners(@load_balancer_id, ports).body end tests("#create_load_balancer_listeners with non-existant SSL certificate") do listeners = [ {'Protocol' => 'HTTPS', 'LoadBalancerPort' => 443, 'InstancePort' => 443, 'SSLCertificateId' => 'non-existant'}, ] - raises(Fog::AWS::IAM::NotFound) { AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners) } + raises(Fog::AWS::IAM::NotFound) { Fog::AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners) } end tests("#create_load_balancer_listeners with invalid SSL certificate").raises(Fog::AWS::IAM::NotFound) do @@ -32,7 +32,7 @@ listeners = [ {'Protocol' => 'HTTPS', 'LoadBalancerPort' => 443, 'InstancePort' => 443, 'SSLCertificateId' => "#{@certificate['Arn']}fake"}, ] - AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body + Fog::AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body end # This is sort of fucked up, but it may or may not fail, thanks AWS @@ -41,10 +41,10 @@ listeners = [ {'Protocol' => 'HTTPS', 'LoadBalancerPort' => 443, 'InstancePort' => 443, 'SSLCertificateId' => @certificate['Arn']}, ] - AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body + Fog::AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners).body end - AWS[:iam].delete_server_certificate(@key_name) - AWS[:elb].delete_load_balancer(@load_balancer_id) + Fog::AWS[:iam].delete_server_certificate(@key_name) + Fog::AWS[:elb].delete_load_balancer(@load_balancer_id) end end diff --git a/tests/aws/requests/elb/load_balancer_tests.rb b/tests/aws/requests/elb/load_balancer_tests.rb index 2d8fb0f661..ebb2b02824 100644 --- a/tests/aws/requests/elb/load_balancer_tests.rb +++ b/tests/aws/requests/elb/load_balancer_tests.rb @@ -3,20 +3,20 @@ @key_name = 'fog-test' tests('success') do - @certificate = AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] + @certificate = Fog::AWS[:iam].upload_server_certificate(AWS::IAM::SERVER_CERT_PUBLIC_KEY, AWS::IAM::SERVER_CERT_PRIVATE_KEY, @key_name).body['Certificate'] tests("#create_load_balancer").formats(AWS::ELB::Formats::CREATE_LOAD_BALANCER) do zones = ['us-east-1a'] listeners = [{'LoadBalancerPort' => 80, 'InstancePort' => 80, 'Protocol' => 'HTTP'}] - AWS[:elb].create_load_balancer(zones, @load_balancer_id, listeners).body + Fog::AWS[:elb].create_load_balancer(zones, @load_balancer_id, listeners).body end tests("#describe_load_balancers").formats(AWS::ELB::Formats::DESCRIBE_LOAD_BALANCERS) do - AWS[:elb].describe_load_balancers.body + Fog::AWS[:elb].describe_load_balancers.body end tests('#describe_load_balancers with bad lb') do - raises(Fog::AWS::ELB::NotFound) { AWS[:elb].describe_load_balancers('none-such-lb') } + raises(Fog::AWS::ELB::NotFound) { Fog::AWS[:elb].describe_load_balancers('none-such-lb') } end tests("#describe_load_balancers with SSL listener") do @@ -24,8 +24,8 @@ listeners = [ {'Protocol' => 'HTTPS', 'LoadBalancerPort' => 443, 'InstancePort' => 443, 'SSLCertificateId' => @certificate['Arn']}, ] - 
AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners) - response = AWS[:elb].describe_load_balancers(@load_balancer_id).body + Fog::AWS[:elb].create_load_balancer_listeners(@load_balancer_id, listeners) + response = Fog::AWS[:elb].describe_load_balancers(@load_balancer_id).body tests("SSLCertificateId is set").returns(@certificate['Arn']) do listeners = response["DescribeLoadBalancersResult"]["LoadBalancerDescriptions"].first["ListenerDescriptions"] listeners.find {|l| l["Listener"]["Protocol"] == 'HTTPS' }["Listener"]["SSLCertificateId"] @@ -41,21 +41,21 @@ 'HealthyThreshold' => 3 } - AWS[:elb].configure_health_check(@load_balancer_id, health_check).body + Fog::AWS[:elb].configure_health_check(@load_balancer_id, health_check).body end tests("#delete_load_balancer").formats(AWS::ELB::Formats::DELETE_LOAD_BALANCER) do - AWS[:elb].delete_load_balancer(@load_balancer_id).body + Fog::AWS[:elb].delete_load_balancer(@load_balancer_id).body end tests("#delete_load_balancer when non existant").formats(AWS::ELB::Formats::DELETE_LOAD_BALANCER) do - AWS[:elb].delete_load_balancer('non-existant').body + Fog::AWS[:elb].delete_load_balancer('non-existant').body end tests("#delete_load_balancer when already deleted").formats(AWS::ELB::Formats::DELETE_LOAD_BALANCER) do - AWS[:elb].delete_load_balancer(@load_balancer_id).body + Fog::AWS[:elb].delete_load_balancer(@load_balancer_id).body end - AWS[:iam].delete_server_certificate(@key_name) + Fog::AWS[:iam].delete_server_certificate(@key_name) end end diff --git a/tests/aws/requests/elb/policy_tests.rb b/tests/aws/requests/elb/policy_tests.rb index 285a044fb3..9c5f279813 100644 --- a/tests/aws/requests/elb/policy_tests.rb +++ b/tests/aws/requests/elb/policy_tests.rb @@ -3,39 +3,39 @@ tests('success') do listeners = [{'LoadBalancerPort' => 80, 'InstancePort' => 80, 'Protocol' => 'HTTP'}] - AWS[:elb].create_load_balancer(['us-east-1a'], @load_balancer_id, listeners) + Fog::AWS[:elb].create_load_balancer(['us-east-1a'], @load_balancer_id, listeners) tests("#create_app_cookie_stickiness_policy").formats(AWS::ELB::Formats::BASIC) do cookie, policy = 'fog-app-cookie', 'fog-app-policy' - AWS[:elb].create_app_cookie_stickiness_policy(@load_balancer_id, policy, cookie).body + Fog::AWS[:elb].create_app_cookie_stickiness_policy(@load_balancer_id, policy, cookie).body end tests("#create_lb_cookie_stickiness_policy with expiry").formats(AWS::ELB::Formats::BASIC) do policy = 'fog-lb-expiry' expiry = 300 - AWS[:elb].create_lb_cookie_stickiness_policy(@load_balancer_id, policy, expiry).body + Fog::AWS[:elb].create_lb_cookie_stickiness_policy(@load_balancer_id, policy, expiry).body end tests("#create_lb_cookie_stickiness_policy without expiry").formats(AWS::ELB::Formats::BASIC) do policy = 'fog-lb-no-expiry' - AWS[:elb].create_lb_cookie_stickiness_policy(@load_balancer_id, policy).body + Fog::AWS[:elb].create_lb_cookie_stickiness_policy(@load_balancer_id, policy).body end tests("#delete_load_balancer_policy").formats(AWS::ELB::Formats::BASIC) do policy = 'fog-lb-no-expiry' - AWS[:elb].delete_load_balancer_policy(@load_balancer_id, policy).body + Fog::AWS[:elb].delete_load_balancer_policy(@load_balancer_id, policy).body end tests("#set_load_balancer_policies_of_listener adds policy").formats(AWS::ELB::Formats::BASIC) do port, policies = 80, ['fog-lb-expiry'] - AWS[:elb].set_load_balancer_policies_of_listener(@load_balancer_id, port, policies).body + Fog::AWS[:elb].set_load_balancer_policies_of_listener(@load_balancer_id, port, policies).body end 
tests("#set_load_balancer_policies_of_listener removes policy").formats(AWS::ELB::Formats::BASIC) do port = 80 - AWS[:elb].set_load_balancer_policies_of_listener(@load_balancer_id, port, []).body + Fog::AWS[:elb].set_load_balancer_policies_of_listener(@load_balancer_id, port, []).body end - AWS[:elb].delete_load_balancer(@load_balancer_id) + Fog::AWS[:elb].delete_load_balancer(@load_balancer_id) end end diff --git a/tests/aws/requests/iam/access_key_tests.rb b/tests/aws/requests/iam/access_key_tests.rb index 3bf5976072..255068f20b 100644 --- a/tests/aws/requests/iam/access_key_tests.rb +++ b/tests/aws/requests/iam/access_key_tests.rb @@ -1,7 +1,7 @@ Shindo.tests('AWS::IAM | access key requests', ['aws']) do unless Fog.mocking? - AWS[:iam].create_user('fog_access_key_tests') + Fog::AWS[:iam].create_user('fog_access_key_tests') end tests('success') do @@ -18,7 +18,7 @@ tests("#create_access_key('UserName' => 'fog_access_key_tests')").formats(@access_key_format) do pending if Fog.mocking? - data = AWS[:iam].create_access_key('UserName' => 'fog_access_key_tests').body + data = Fog::AWS[:iam].create_access_key('UserName' => 'fog_access_key_tests').body @access_key_id = data['AccessKey']['AccessKeyId'] data end @@ -34,17 +34,17 @@ tests("#list_access_keys('Username' => 'fog_access_key_tests')").formats(@access_keys_format) do pending if Fog.mocking? - AWS[:iam].list_access_keys('UserName' => 'fog_access_key_tests').body + Fog::AWS[:iam].list_access_keys('UserName' => 'fog_access_key_tests').body end tests("#update_access_key('#{@access_key_id}', 'Inactive', 'UserName' => 'fog_access_key_tests')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].update_access_key(@access_key_id, 'Inactive', 'UserName' => 'fog_access_key_tests').body + Fog::AWS[:iam].update_access_key(@access_key_id, 'Inactive', 'UserName' => 'fog_access_key_tests').body end tests("#delete_access_key('#{@access_key_id}', 'UserName' => 'fog_access_key_tests)").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_access_key(@access_key_id, 'UserName' => 'fog_access_key_tests').body + Fog::AWS[:iam].delete_access_key(@access_key_id, 'UserName' => 'fog_access_key_tests').body end end @@ -54,7 +54,7 @@ end unless Fog.mocking? - AWS[:iam].delete_user('fog_access_key_tests') + Fog::AWS[:iam].delete_user('fog_access_key_tests') end end \ No newline at end of file diff --git a/tests/aws/requests/iam/group_policy_tests.rb b/tests/aws/requests/iam/group_policy_tests.rb index b265a56dac..00d99a6f59 100644 --- a/tests/aws/requests/iam/group_policy_tests.rb +++ b/tests/aws/requests/iam/group_policy_tests.rb @@ -1,7 +1,7 @@ Shindo.tests('AWS::IAM | group policy requests', ['aws']) do unless Fog.mocking? - AWS[:iam].create_group('fog_group_policy_tests') + Fog::AWS[:iam].create_group('fog_group_policy_tests') end tests('success') do @@ -10,7 +10,7 @@ tests("#put_group_policy('fog_group_policy_tests', 'fog_policy', #{@policy.inspect})").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].put_group_policy('fog_group_policy_tests', 'fog_policy', @policy).body + Fog::AWS[:iam].put_group_policy('fog_group_policy_tests', 'fog_policy', @policy).body end @group_policies_format = { @@ -21,12 +21,12 @@ tests("list_group_policies('fog_group_policy_tests')").formats(@group_policies_format) do pending if Fog.mocking? 
- AWS[:iam].list_group_policies('fog_group_policy_tests').body + Fog::AWS[:iam].list_group_policies('fog_group_policy_tests').body end tests("#delete_group_policy('fog_group_policy_tests', 'fog_policy')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_group_policy('fog_group_policy_tests', 'fog_policy').body + Fog::AWS[:iam].delete_group_policy('fog_group_policy_tests', 'fog_policy').body end end @@ -36,7 +36,7 @@ end unless Fog.mocking? - AWS[:iam].delete_group('fog_group_policy_tests') + Fog::AWS[:iam].delete_group('fog_group_policy_tests') end end \ No newline at end of file diff --git a/tests/aws/requests/iam/group_tests.rb b/tests/aws/requests/iam/group_tests.rb index 9fda645362..6947b0de2f 100644 --- a/tests/aws/requests/iam/group_tests.rb +++ b/tests/aws/requests/iam/group_tests.rb @@ -14,7 +14,7 @@ tests("#create_group('fog_group')").formats(@group_format) do pending if Fog.mocking? - AWS[:iam].create_group('fog_group').body + Fog::AWS[:iam].create_group('fog_group').body end @groups_format = { @@ -30,12 +30,12 @@ tests("#list_groups").formats(@groups_format) do pending if Fog.mocking? - AWS[:iam].list_groups.body + Fog::AWS[:iam].list_groups.body end tests("#delete_group('fog_group')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_group('fog_group').body + Fog::AWS[:iam].delete_group('fog_group').body end end diff --git a/tests/aws/requests/iam/login_profile_tests.rb b/tests/aws/requests/iam/login_profile_tests.rb index b860f646ec..5b0ed542f0 100644 --- a/tests/aws/requests/iam/login_profile_tests.rb +++ b/tests/aws/requests/iam/login_profile_tests.rb @@ -1,7 +1,7 @@ Shindo.tests('AWS::IAM | user requests', ['aws']) do unless Fog.mocking? - AWS[:iam].create_user('fog_user') + Fog::AWS[:iam].create_user('fog_user') end @@ -17,12 +17,12 @@ tests("#create_login_profile('fog_user')").formats(@login_profile_format) do pending if Fog.mocking? - AWS[:iam].create_login_profile('fog_user', 'somepassword').body + Fog::AWS[:iam].create_login_profile('fog_user', 'somepassword').body end tests("#get_login_profile('fog_user')").formats(@login_profile_format) do pending if Fog.mocking? - result = AWS[:iam].get_login_profile('fog_user').body + result = Fog::AWS[:iam].get_login_profile('fog_user').body returns('fog_user') {result['LoginProfile']['UserName']} result end @@ -30,7 +30,7 @@ tests("#update_login_profile('fog_user')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? begin - AWS[:iam].update_login_profile('fog_user', 'otherpassword').body + Fog::AWS[:iam].update_login_profile('fog_user', 'otherpassword').body rescue Excon::Errors::Conflict #profile cannot be updated or deleted until it has finished creating; api provides no way of telling whether creation process complete sleep 5 retry @@ -39,12 +39,12 @@ tests("#delete_login_profile('fog_user')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_login_profile('fog_user').body + Fog::AWS[:iam].delete_login_profile('fog_user').body end tests("#get_login_profile('fog_user')") do pending if Fog.mocking? - raises(Excon::Errors::NotFound) {AWS[:iam].get_login_profile('fog_user')} + raises(Excon::Errors::NotFound) {Fog::AWS[:iam].get_login_profile('fog_user')} end end @@ -52,13 +52,13 @@ tests('failure') do tests('get login profile for non existing user') do pending if Fog.mocking? 
- raises(Fog::AWS::IAM::NotFound) { AWS[:iam].get_login_profile('idontexist')} - raises(Fog::AWS::IAM::NotFound) { AWS[:iam].delete_login_profile('fog_user')} + raises(Fog::AWS::IAM::NotFound) { Fog::AWS[:iam].get_login_profile('idontexist')} + raises(Fog::AWS::IAM::NotFound) { Fog::AWS[:iam].delete_login_profile('fog_user')} end end unless Fog.mocking? - AWS[:iam].delete_user('fog_user') + Fog::AWS[:iam].delete_user('fog_user') end end diff --git a/tests/aws/requests/iam/server_certificate_tests.rb b/tests/aws/requests/iam/server_certificate_tests.rb index 479a4e20b8..229696f67e 100644 --- a/tests/aws/requests/iam/server_certificate_tests.rb +++ b/tests/aws/requests/iam/server_certificate_tests.rb @@ -1,17 +1,25 @@ Shindo.tests('AWS::IAM | server certificate requests', ['aws']) do @key_name = 'fog-test' + @key_name_chained = 'fog-test-chained' @certificate_format = { - 'Arn' => String, - 'Path' => String, - 'ServerCertificateId' => String, - 'ServerCertificateName' => String, - 'UploadDate' => Time + 'Arn' => String, + 'Path' => String, + 'ServerCertificateId' => String, + 'ServerCertificateName' => String, + 'UploadDate' => Time } @upload_format = { 'Certificate' => @certificate_format, 'RequestId' => String } + @get_server_certificate_format = { + 'Certificate' => @certificate_format, + 'RequestId' => String + } + @list_format = { + 'Certificates' => [@certificate_format] + } tests('#upload_server_certificate') do public_key = AWS::IAM::SERVER_CERT_PUBLIC_KEY @@ -19,44 +27,55 @@ private_key_mismatch = AWS::IAM::SERVER_CERT_PRIVATE_KEY_MISMATCHED tests('empty public key').raises(Fog::AWS::IAM::ValidationError) do - AWS[:iam].upload_server_certificate('', private_key, @key_name) + Fog::AWS::IAM.new.upload_server_certificate('', private_key, @key_name) end tests('empty private key').raises(Fog::AWS::IAM::ValidationError) do - AWS[:iam].upload_server_certificate(public_key, '', @key_name) + Fog::AWS::IAM.new.upload_server_certificate(public_key, '', @key_name) end tests('invalid public key').raises(Fog::AWS::IAM::MalformedCertificate) do - AWS[:iam].upload_server_certificate('abcde', private_key, @key_name) + Fog::AWS::IAM.new.upload_server_certificate('abcde', private_key, @key_name) end tests('invalid private key').raises(Fog::AWS::IAM::MalformedCertificate) do - AWS[:iam].upload_server_certificate(public_key, 'abcde', @key_name) + Fog::AWS::IAM.new.upload_server_certificate(public_key, 'abcde', @key_name) end tests('mismatched private key').raises(Fog::AWS::IAM::KeyPairMismatch) do - AWS[:iam].upload_server_certificate(public_key, private_key_mismatch, @key_name) + Fog::AWS::IAM.new.upload_server_certificate(public_key, private_key_mismatch, @key_name) end tests('format').formats(@upload_format) do - AWS[:iam].upload_server_certificate(public_key, private_key, @key_name).body + Fog::AWS::IAM.new.upload_server_certificate(public_key, private_key, @key_name).body + end + + tests('format with chain').formats(@upload_format) do + Fog::AWS::IAM.new.upload_server_certificate(public_key, private_key, @key_name_chained, { 'CertificateChain' => public_key }).body end tests('duplicate name').raises(Fog::AWS::IAM::EntityAlreadyExists) do - AWS[:iam].upload_server_certificate(public_key, private_key, @key_name) + Fog::AWS::IAM.new.upload_server_certificate(public_key, private_key, @key_name) end end - tests('#get_server_certificate').formats(@upload_format) do + tests('#get_server_certificate').formats(@get_server_certificate_format) do tests('raises NotFound').raises(Fog::AWS::IAM::NotFound) do - 
AWS[:iam].get_server_certificate("#{@key_name}fake") + Fog::AWS::IAM.new.get_server_certificate("#{@key_name}fake") end - AWS[:iam].get_server_certificate(@key_name).body + Fog::AWS::IAM.new.get_server_certificate(@key_name).body end - @list_format = { 'Certificates' => [@certificate_format] } tests('#list_server_certificates').formats(@list_format) do - result = AWS[:iam].list_server_certificates.body + result = Fog::AWS::IAM.new.list_server_certificates.body + tests('includes key name') do + returns(true) { result['Certificates'].any?{|c| c['ServerCertificateName'] == @key_name} } + end + result + end + + tests("#list_server_certificates('path-prefix' => '/'").formats(@list_format) do + result = Fog::AWS::IAM.new.list_server_certificates('PathPrefix' => '/').body tests('includes key name') do returns(true) { result['Certificates'].any?{|c| c['ServerCertificateName'] == @key_name} } end @@ -64,6 +83,8 @@ end tests('#delete_server_certificate').formats(AWS::IAM::Formats::BASIC) do - AWS[:iam].delete_server_certificate(@key_name).body + Fog::AWS::IAM.new.delete_server_certificate(@key_name).body end + + Fog::AWS::IAM.new.delete_server_certificate(@key_name_chained) end diff --git a/tests/aws/requests/iam/user_policy_tests.rb b/tests/aws/requests/iam/user_policy_tests.rb index bbc34ca6cb..3d72b267e5 100644 --- a/tests/aws/requests/iam/user_policy_tests.rb +++ b/tests/aws/requests/iam/user_policy_tests.rb @@ -1,7 +1,7 @@ Shindo.tests('AWS::IAM | user policy requests', ['aws']) do unless Fog.mocking? - AWS[:iam].create_user('fog_user_policy_tests') + Fog::AWS[:iam].create_user('fog_user_policy_tests') end tests('success') do @@ -10,7 +10,7 @@ tests("#put_user_policy('fog_user_policy_tests', 'fog_policy', #{@policy.inspect})").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].put_user_policy('fog_user_policy_tests', 'fog_policy', @policy).body + Fog::AWS[:iam].put_user_policy('fog_user_policy_tests', 'fog_policy', @policy).body end @user_policies_format = { @@ -21,12 +21,12 @@ tests("list_user_policies('fog_user_policy_tests')").formats(@user_policies_format) do pending if Fog.mocking? - AWS[:iam].list_user_policies('fog_user_policy_tests').body + Fog::AWS[:iam].list_user_policies('fog_user_policy_tests').body end tests("#delete_user_policy('fog_user_policy_tests', 'fog_policy')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_user_policy('fog_user_policy_tests', 'fog_policy').body + Fog::AWS[:iam].delete_user_policy('fog_user_policy_tests', 'fog_policy').body end end @@ -36,7 +36,7 @@ end unless Fog.mocking? - AWS[:iam].delete_user('fog_user_policy_tests') + Fog::AWS[:iam].delete_user('fog_user_policy_tests') end end \ No newline at end of file diff --git a/tests/aws/requests/iam/user_tests.rb b/tests/aws/requests/iam/user_tests.rb index d3d8ccea17..2d91eebb8d 100644 --- a/tests/aws/requests/iam/user_tests.rb +++ b/tests/aws/requests/iam/user_tests.rb @@ -1,7 +1,7 @@ Shindo.tests('AWS::IAM | user requests', ['aws']) do unless Fog.mocking? - AWS[:iam].create_group('fog_user_tests') + Fog::AWS[:iam].create_group('fog_user_tests') end tests('success') do @@ -18,7 +18,7 @@ tests("#create_user('fog_user')").formats(@user_format) do pending if Fog.mocking? - AWS[:iam].create_user('fog_user').body + Fog::AWS[:iam].create_user('fog_user').body end @users_format = { @@ -34,22 +34,22 @@ tests("#list_users").formats(@users_format) do pending if Fog.mocking? 
- AWS[:iam].list_users.body + Fog::AWS[:iam].list_users.body end tests("#add_user_to_group('fog_user_tests', 'fog_user')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].add_user_to_group('fog_user_tests', 'fog_user').body + Fog::AWS[:iam].add_user_to_group('fog_user_tests', 'fog_user').body end tests("#remove_user_from_group('fog_user_tests', 'fog_user')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].remove_user_from_group('fog_user_tests', 'fog_user').body + Fog::AWS[:iam].remove_user_from_group('fog_user_tests', 'fog_user').body end tests("#delete_user('fog_user')").formats(AWS::IAM::Formats::BASIC) do pending if Fog.mocking? - AWS[:iam].delete_user('fog_user').body + Fog::AWS[:iam].delete_user('fog_user').body end end @@ -59,7 +59,7 @@ end unless Fog.mocking? - AWS[:iam].delete_group('fog_user_tests') + Fog::AWS[:iam].delete_group('fog_user_tests') end end \ No newline at end of file diff --git a/tests/aws/requests/rds/instance_tests.rb b/tests/aws/requests/rds/instance_tests.rb index c3e6aaa0c5..4f8a664f5b 100644 --- a/tests/aws/requests/rds/instance_tests.rb +++ b/tests/aws/requests/rds/instance_tests.rb @@ -13,7 +13,7 @@ pending if Fog.mocking? tests("#create_db_instance").formats(AWS::RDS::Formats::CREATE_DB_INSTANCE) do - result = AWS[:rds].create_db_instance(@db_instance_id, 'AllocatedStorage' => 5, + result = Fog::AWS[:rds].create_db_instance(@db_instance_id, 'AllocatedStorage' => 5, 'DBInstanceClass' => 'db.m1.small', 'Engine' => 'mysql', 'EngineVersion' => '5.1.50', @@ -27,15 +27,15 @@ end tests("#describe_db_instances").formats(AWS::RDS::Formats::DESCRIBE_DB_INSTANCES) do - AWS[:rds].describe_db_instances.body + Fog::AWS[:rds].describe_db_instances.body end - server = AWS[:rds].servers.get(@db_instance_id) + server = Fog::AWS[:rds].servers.get(@db_instance_id) server.wait_for {ready?} new_storage = 6 tests("#modify_db_instance with immediate apply").formats(AWS::RDS::Formats::MODIFY_DB_INSTANCE) do - body = AWS[:rds].modify_db_instance(@db_instance_id, true, 'AllocatedStorage'=> new_storage).body + body = Fog::AWS[:rds].modify_db_instance(@db_instance_id, true, 'AllocatedStorage'=> new_storage).body tests 'pending storage' do instance = body['ModifyDBInstanceResult']['DBInstance'] returns(new_storage){instance['PendingModifiedValues']['AllocatedStorage']} @@ -52,7 +52,7 @@ tests("reboot db instance") do tests("#reboot").formats(AWS::RDS::Formats::REBOOT_DB_INSTANCE) do - AWS[:rds].reboot_db_instance(@db_instance_id).body + Fog::AWS[:rds].reboot_db_instance(@db_instance_id).body end end @@ -60,22 +60,22 @@ server.reload.wait_for { state == 'available'} tests("#create_db_snapshot").formats(AWS::RDS::Formats::CREATE_DB_SNAPSHOT) do - body = AWS[:rds].create_db_snapshot(@db_instance_id, @db_snapshot_id).body + body = Fog::AWS[:rds].create_db_snapshot(@db_instance_id, @db_snapshot_id).body returns('creating'){ body['CreateDBSnapshotResult']['DBSnapshot']['Status']} body end tests("#describe_db_snapshots").formats(AWS::RDS::Formats::DESCRIBE_DB_SNAPSHOTS) do - body = AWS[:rds].describe_db_snapshots.body + body = Fog::AWS[:rds].describe_db_snapshots.body end server.reload.wait_for { state == 'available' } tests( "#create read replica").formats(AWS::RDS::Formats::CREATE_READ_REPLICA) do - AWS[:rds].create_db_instance_read_replica(@db_replica_id, @db_instance_id).body + Fog::AWS[:rds].create_db_instance_read_replica(@db_replica_id, @db_instance_id).body end - replica = AWS[:rds].servers.get(@db_replica_id) + replica = 
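Note: the user and group requests migrated above follow the same accessor change; a compact sketch of that lifecycle (the group and user names are arbitrary):

    require 'fog'

    iam = Fog::AWS[:iam]

    iam.create_group('fog_user_tests')
    iam.create_user('fog_user')
    iam.add_user_to_group('fog_user_tests', 'fog_user')
    iam.list_users.body

    iam.remove_user_from_group('fog_user_tests', 'fog_user')
    iam.delete_user('fog_user')
    iam.delete_group('fog_user_tests')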
Fog::AWS[:rds].servers.get(@db_replica_id) replica.wait_for {ready?} tests("replica source") do @@ -89,25 +89,25 @@ tests("#delete_db_instance").formats(AWS::RDS::Formats::DELETE_DB_INSTANCE) do #server.wait_for { state == 'available'} - AWS[:rds].delete_db_instance(@db_replica_id, nil, true) - body = AWS[:rds].delete_db_instance(@db_instance_id, @db_final_snapshot_id).body + Fog::AWS[:rds].delete_db_instance(@db_replica_id, nil, true) + body = Fog::AWS[:rds].delete_db_instance(@db_instance_id, @db_final_snapshot_id).body tests "final snapshot" do - returns('creating'){AWS[:rds].describe_db_snapshots(:snapshot_id => @db_final_snapshot_id).body['DescribeDBSnapshotsResult']['DBSnapshots'].first['Status']} + returns('creating'){Fog::AWS[:rds].describe_db_snapshots(:snapshot_id => @db_final_snapshot_id).body['DescribeDBSnapshotsResult']['DBSnapshots'].first['Status']} end body end tests("#delete_db_snapshot").formats(AWS::RDS::Formats::DELETE_DB_SNAPSHOT) do - AWS[:rds].snapshots.get(@db_snapshot_id).wait_for { ready? } - AWS[:rds].delete_db_snapshot(@db_snapshot_id).body + Fog::AWS[:rds].snapshots.get(@db_snapshot_id).wait_for { ready? } + Fog::AWS[:rds].delete_db_snapshot(@db_snapshot_id).body end tests("snapshot.destroy") do - snapshot = AWS[:rds].snapshots.get(@db_final_snapshot_id) + snapshot = Fog::AWS[:rds].snapshots.get(@db_final_snapshot_id) snapshot.wait_for { ready? } snapshot.destroy - returns(nil) { AWS[:rds].snapshots.get(@db_final_snapshot_id) } + returns(nil) { Fog::AWS[:rds].snapshots.get(@db_final_snapshot_id) } end end @@ -116,13 +116,13 @@ pending if Fog.mocking? tests "deleting nonexisting instance" do - raises(Fog::AWS::RDS::NotFound) {AWS[:rds].delete_db_instance('doesnexist', 'irrelevant')} + raises(Fog::AWS::RDS::NotFound) {Fog::AWS[:rds].delete_db_instance('doesnexist', 'irrelevant')} end tests "deleting non existing snapshot" do - raises(Fog::AWS::RDS::NotFound) {AWS[:rds].delete_db_snapshot('doesntexist')} + raises(Fog::AWS::RDS::NotFound) {Fog::AWS[:rds].delete_db_snapshot('doesntexist')} end tests "modifying non existing instance" do - raises(Fog::AWS::RDS::NotFound) { AWS[:rds].modify_db_instance 'doesntexit', true, 'AllocatedStorage'=> 10} + raises(Fog::AWS::RDS::NotFound) { Fog::AWS[:rds].modify_db_instance 'doesntexit', true, 'AllocatedStorage'=> 10} end end end diff --git a/tests/aws/requests/rds/parameter_group_tests.rb b/tests/aws/requests/rds/parameter_group_tests.rb index 73526f0e81..9654a38035 100644 --- a/tests/aws/requests/rds/parameter_group_tests.rb +++ b/tests/aws/requests/rds/parameter_group_tests.rb @@ -4,7 +4,7 @@ tests("#create_db_parameter_groups").formats(AWS::RDS::Formats::CREATE_DB_PARAMETER_GROUP) do pending if Fog.mocking? - body = AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description').body + body = Fog::AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description').body returns( 'mysql5.1') { body['CreateDBParameterGroupResult']['DBParameterGroup']['DBParameterGroupFamily']} returns( 'fog-group') { body['CreateDBParameterGroupResult']['DBParameterGroup']['DBParameterGroupName']} @@ -13,12 +13,12 @@ body end - AWS[:rds].create_db_parameter_group('other-fog-group', 'MySQL5.1', 'Some description') + Fog::AWS[:rds].create_db_parameter_group('other-fog-group', 'MySQL5.1', 'Some description') tests("#describe_db_parameter_groups").formats(AWS::RDS::Formats::DESCRIBE_DB_PARAMETER_GROUP) do pending if Fog.mocking? 
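Note: the RDS instance requests above now hang off Fog::AWS[:rds]. A rough sketch of the lifecycle those tests exercise; only the options visible in this diff are shown, and a real call also needs the remaining required parameters (master credentials and so on):

    require 'fog'

    rds = Fog::AWS[:rds]   # replaces the old AWS[:rds] shorthand

    rds.create_db_instance('fog-test-instance',
      'AllocatedStorage' => 5,
      'DBInstanceClass'  => 'db.m1.small',
      'Engine'           => 'mysql',
      'EngineVersion'    => '5.1.50')

    server = rds.servers.get('fog-test-instance')
    server.wait_for { ready? }

    rds.create_db_snapshot('fog-test-instance', 'fog-test-snapshot')
    rds.delete_db_instance('fog-test-instance', 'fog-final-snapshot')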
- body = AWS[:rds].describe_db_parameter_groups().body + body = Fog::AWS[:rds].describe_db_parameter_groups().body returns(3) {body['DescribeDBParameterGroupsResult']['DBParameterGroups'].length} body @@ -27,7 +27,7 @@ tests("#describe_db_parameter_groups('fog-group)").formats(AWS::RDS::Formats::DESCRIBE_DB_PARAMETER_GROUP) do pending if Fog.mocking? - body = AWS[:rds].describe_db_parameter_groups('fog-group').body + body = Fog::AWS[:rds].describe_db_parameter_groups('fog-group').body returns(1) {body['DescribeDBParameterGroupsResult']['DBParameterGroups'].length} @@ -41,27 +41,27 @@ tests("delete_db_parameter_group").formats(AWS::RDS::Formats::BASIC) do pending if Fog.mocking? - body = AWS[:rds].delete_db_parameter_group('fog-group').body + body = Fog::AWS[:rds].delete_db_parameter_group('fog-group').body - raises(Fog::AWS::RDS::NotFound) {AWS[:rds].describe_db_parameter_groups('fog-group')} + raises(Fog::AWS::RDS::NotFound) {Fog::AWS[:rds].describe_db_parameter_groups('fog-group')} body end - AWS[:rds].delete_db_parameter_group('other-fog-group') + Fog::AWS[:rds].delete_db_parameter_group('other-fog-group') end tests("failures") do pending if Fog.mocking? - raises(Fog::AWS::RDS::NotFound) {AWS[:rds].describe_db_parameter_groups('doesntexist')} - raises(Fog::AWS::RDS::NotFound) {AWS[:rds].delete_db_parameter_group('doesntexist')} + raises(Fog::AWS::RDS::NotFound) {Fog::AWS[:rds].describe_db_parameter_groups('doesntexist')} + raises(Fog::AWS::RDS::NotFound) {Fog::AWS[:rds].delete_db_parameter_group('doesntexist')} tests "creating second group with same id" do - AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description') - raises(Fog::AWS::RDS::IdentifierTaken) {AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description')} + Fog::AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description') + raises(Fog::AWS::RDS::IdentifierTaken) {Fog::AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description')} end - AWS[:rds].delete_db_parameter_group('fog-group') + Fog::AWS[:rds].delete_db_parameter_group('fog-group') end diff --git a/tests/aws/requests/rds/parameter_request_tests.rb b/tests/aws/requests/rds/parameter_request_tests.rb index 720ef535d0..3801391c5f 100644 --- a/tests/aws/requests/rds/parameter_request_tests.rb +++ b/tests/aws/requests/rds/parameter_request_tests.rb @@ -2,10 +2,10 @@ tests('success') do pending if Fog.mocking? 
- AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description') + Fog::AWS[:rds].create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description') tests('#modify_db_parameter_group').formats(AWS::RDS::Formats::MODIFY_PARAMETER_GROUP) do - body = AWS[:rds].modify_db_parameter_group('fog-group',[ + body = Fog::AWS[:rds].modify_db_parameter_group('fog-group',[ {'ParameterName' => 'query_cache_size', 'ParameterValue' => '12345', 'ApplyMethod' => 'immediate'} @@ -15,12 +15,12 @@ end tests('#describe_db_parameters').formats(AWS::RDS::Formats::DESCRIBE_DB_PARAMETERS) do - AWS[:rds].describe_db_parameters('fog-group', :max_records => 20).body + Fog::AWS[:rds].describe_db_parameters('fog-group', :max_records => 20).body end tests("#describe_db_parameters :source => 'user'")do - body = AWS[:rds].describe_db_parameters('fog-group', :source => 'user').body + body = Fog::AWS[:rds].describe_db_parameters('fog-group', :source => 'user').body returns(1){ body['DescribeDBParametersResult']['Parameters'].length} param = body['DescribeDBParametersResult']['Parameters'].first @@ -29,7 +29,7 @@ returns(true){param['IsModifiable']} returns('query_cache_size'){param['ParameterName']} end - AWS[:rds].delete_db_parameter_group('fog-group') + Fog::AWS[:rds].delete_db_parameter_group('fog-group') end end diff --git a/tests/aws/requests/ses/verified_email_address_tests.rb b/tests/aws/requests/ses/verified_email_address_tests.rb index 6bfd077b7a..3a972f8227 100644 --- a/tests/aws/requests/ses/verified_email_address_tests.rb +++ b/tests/aws/requests/ses/verified_email_address_tests.rb @@ -4,18 +4,18 @@ tests("#verify_email_address('test@example.com')").formats(AWS::SES::Formats::BASIC) do pending if Fog.mocking? - AWS[:ses].verify_email_address('test@example.com').body + Fog::AWS[:ses].verify_email_address('test@example.com').body end tests("#list_verified_email_addresses").formats(AWS::SES::Formats::BASIC.merge('VerifiedEmailAddresses' => [String])) do pending if Fog.mocking? - AWS[:ses].list_verified_email_addresses.body + Fog::AWS[:ses].list_verified_email_addresses.body end # email won't be there to delete, but succeeds regardless tests("#delete_verified_email_address('test@example.com')").formats(AWS::SES::Formats::BASIC) do pending if Fog.mocking? 
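Note: the parameter group and parameter requests above translate to roughly the following outside the test suite (group name and parameter value taken from the tests; a sketch, not a recommendation for real settings):

    require 'fog'

    rds = Fog::AWS[:rds]

    rds.create_db_parameter_group('fog-group', 'MySQL5.1', 'Some description')

    rds.modify_db_parameter_group('fog-group', [
      { 'ParameterName'  => 'query_cache_size',
        'ParameterValue' => '12345',
        'ApplyMethod'    => 'immediate' }
    ])

    rds.describe_db_parameters('fog-group', :source => 'user').body
    rds.delete_db_parameter_group('fog-group')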
- AWS[:ses].delete_verified_email_address('notaanemail@example.com').body + Fog::AWS[:ses].delete_verified_email_address('notaanemail@example.com').body end end diff --git a/tests/aws/requests/simpledb/attributes_tests.rb b/tests/aws/requests/simpledb/attributes_tests.rb index 48aa6e01d1..ad8edc8dd7 100644 --- a/tests/aws/requests/simpledb/attributes_tests.rb +++ b/tests/aws/requests/simpledb/attributes_tests.rb @@ -2,57 +2,57 @@ @domain_name = "fog_domain_#{Time.now.to_f.to_s.gsub('.','')}" - AWS[:sdb].create_domain(@domain_name) + Fog::AWS[:simpledb].create_domain(@domain_name) tests('success') do tests("#batch_put_attributes('#{@domain_name}', { 'a' => { 'b' => 'c', 'd' => 'e' }, 'x' => { 'y' => 'z' } }).body").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].batch_put_attributes(@domain_name, { 'a' => { 'b' => 'c', 'd' => 'e' }, 'x' => { 'y' => 'z' } }).body + Fog::AWS[:simpledb].batch_put_attributes(@domain_name, { 'a' => { 'b' => 'c', 'd' => 'e' }, 'x' => { 'y' => 'z' } }).body end tests("#get_attributes('#{@domain_name}', 'a', {'ConsistentRead' => true}).body['Attributes']").returns({'b' => ['c'], 'd' => ['e']}) do - AWS[:sdb].get_attributes(@domain_name, 'a', {'ConsistentRead' => true}).body['Attributes'] + Fog::AWS[:simpledb].get_attributes(@domain_name, 'a', {'ConsistentRead' => true}).body['Attributes'] end tests("#get_attributes('#{@domain_name}', 'AttributeName' => 'notanattribute')").succeeds do - AWS[:sdb].get_attributes(@domain_name, 'AttributeName' => 'notanattribute') + Fog::AWS[:simpledb].get_attributes(@domain_name, 'AttributeName' => 'notanattribute') end tests("#select('select * from #{@domain_name}', {'ConsistentRead' => true}).body['Items']").returns({'a' => { 'b' => ['c'], 'd' => ['e']}, 'x' => { 'y' => ['z'] } }) do pending if Fog.mocking? - AWS[:sdb].select("select * from #{@domain_name}", {'ConsistentRead' => true}).body['Items'] + Fog::AWS[:simpledb].select("select * from #{@domain_name}", {'ConsistentRead' => true}).body['Items'] end tests("#put_attributes('#{@domain_name}', 'conditional', { 'version' => '1' }).body").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].put_attributes(@domain_name, 'conditional', { 'version' => '1' }).body + Fog::AWS[:simpledb].put_attributes(@domain_name, 'conditional', { 'version' => '1' }).body end tests("#put_attributes('#{@domain_name}', 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version']).body").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].put_attributes(@domain_name, 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version']).body + Fog::AWS[:simpledb].put_attributes(@domain_name, 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version']).body end # Verify that we can delete individual attributes. tests("#delete_attributes('#{@domain_name}', 'a', {'d' => []})").succeeds do - AWS[:sdb].delete_attributes(@domain_name, 'a', {'d' => []}).body + Fog::AWS[:simpledb].delete_attributes(@domain_name, 'a', {'d' => []}).body end # Verify that individually deleted attributes are actually removed. 
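Note: the SimpleDB tests above rename the accessor from AWS[:sdb] to Fog::AWS[:simpledb]; the request signatures are unchanged. A minimal sketch using the same calls, with an arbitrary domain name:

    require 'fog'

    sdb = Fog::AWS[:simpledb]   # formerly AWS[:sdb]

    domain = "fog_domain_#{Time.now.to_i}"
    sdb.create_domain(domain)

    sdb.batch_put_attributes(domain, 'a' => { 'b' => 'c' }, 'x' => { 'y' => 'z' })
    sdb.get_attributes(domain, 'a', 'ConsistentRead' => true).body['Attributes']
    # => { 'b' => ['c'] }

    # Conditional put, mirroring the :expect / :replace case above
    sdb.put_attributes(domain, 'conditional', { 'version' => '1' })
    sdb.put_attributes(domain, 'conditional', { 'version' => '2' },
                       :expect => { 'version' => '1' }, :replace => ['version'])

    sdb.delete_domain(domain)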
tests("#get_attributes('#{@domain_name}', 'a', {'AttributeName' => ['d'], 'ConsistentRead' => true}).body['Attributes']").returns({}) do - AWS[:sdb].get_attributes(@domain_name, 'a', {'AttributeName' => ['d'], 'ConsistentRead' => true}).body['Attributes'] + Fog::AWS[:simpledb].get_attributes(@domain_name, 'a', {'AttributeName' => ['d'], 'ConsistentRead' => true}).body['Attributes'] end tests("#delete_attributes('#{@domain_name}', 'a').body").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].delete_attributes(@domain_name, 'a').body + Fog::AWS[:simpledb].delete_attributes(@domain_name, 'a').body end # Verify that we can delete entire domain, item combinations. tests("#delete_attributes('#{@domain_name}', 'a').body").succeeds do - AWS[:sdb].delete_attributes(@domain_name, 'a').body + Fog::AWS[:simpledb].delete_attributes(@domain_name, 'a').body end # Verify that deleting a domain, item combination removes all related attributes. tests("#get_attributes('#{@domain_name}', 'a', {'ConsistentRead' => true}).body['Attributes']").returns({}) do - AWS[:sdb].get_attributes(@domain_name, 'a', {'ConsistentRead' => true}).body['Attributes'] + Fog::AWS[:simpledb].get_attributes(@domain_name, 'a', {'ConsistentRead' => true}).body['Attributes'] end end @@ -60,27 +60,27 @@ tests('failure') do tests("#batch_put_attributes('notadomain', { 'a' => { 'b' => 'c' }, 'x' => { 'y' => 'z' } })").raises(Excon::Errors::BadRequest) do - AWS[:sdb].batch_put_attributes('notadomain', { 'a' => { 'b' => 'c' }, 'x' => { 'y' => 'z' } }) + Fog::AWS[:simpledb].batch_put_attributes('notadomain', { 'a' => { 'b' => 'c' }, 'x' => { 'y' => 'z' } }) end tests("#get_attributes('notadomain', 'a')").raises(Excon::Errors::BadRequest) do - AWS[:sdb].get_attributes('notadomain', 'a') + Fog::AWS[:simpledb].get_attributes('notadomain', 'a') end tests("#put_attributes('notadomain', 'conditional', { 'version' => '1' })").raises(Excon::Errors::BadRequest) do - AWS[:sdb].put_attributes('notadomain', 'foo', { 'version' => '1' }) + Fog::AWS[:simpledb].put_attributes('notadomain', 'foo', { 'version' => '1' }) end tests("#put_attributes('#{@domain_name}', 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version'])").raises(Excon::Errors::Conflict) do - AWS[:sdb].put_attributes(@domain_name, 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version']) + Fog::AWS[:simpledb].put_attributes(@domain_name, 'conditional', { 'version' => '2' }, :expect => { 'version' => '1' }, :replace => ['version']) end tests("#delete_attributes('notadomain', 'a')").raises(Excon::Errors::BadRequest) do - AWS[:sdb].delete_attributes('notadomain', 'a') + Fog::AWS[:simpledb].delete_attributes('notadomain', 'a') end end - AWS[:sdb].delete_domain(@domain_name) + Fog::AWS[:simpledb].delete_domain(@domain_name) end diff --git a/tests/aws/requests/simpledb/domain_tests.rb b/tests/aws/requests/simpledb/domain_tests.rb index fbabf298a2..f99a56bf21 100644 --- a/tests/aws/requests/simpledb/domain_tests.rb +++ b/tests/aws/requests/simpledb/domain_tests.rb @@ -15,27 +15,27 @@ tests('success') do tests("#create_domain(#{@domain_name})").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].create_domain(@domain_name).body + Fog::AWS[:simpledb].create_domain(@domain_name).body end tests("#create_domain(#{@domain_name})").succeeds do - AWS[:sdb].create_domain(@domain_name) + Fog::AWS[:simpledb].create_domain(@domain_name) end tests("#domain_metadata(#{@domain_name})").formats(@domain_metadata_format) do - 
AWS[:sdb].domain_metadata(@domain_name).body + Fog::AWS[:simpledb].domain_metadata(@domain_name).body end tests("#list_domains").formats(AWS::SimpleDB::Formats::BASIC.merge('Domains' => [String])) do - AWS[:sdb].list_domains.body + Fog::AWS[:simpledb].list_domains.body end tests("#delete_domain(#{@domain_name})").formats(AWS::SimpleDB::Formats::BASIC) do - AWS[:sdb].delete_domain(@domain_name).body + Fog::AWS[:simpledb].delete_domain(@domain_name).body end tests("#delete_domain(#{@domain_name})").succeeds do - AWS[:sdb].delete_domain(@domain_name) + Fog::AWS[:simpledb].delete_domain(@domain_name) end end @@ -43,7 +43,7 @@ tests('failure') do tests("#domain_metadata('notadomain')").raises(Excon::Errors::BadRequest) do - AWS[:sdb].domain_metadata('notadomain') + Fog::AWS[:simpledb].domain_metadata('notadomain') end end diff --git a/tests/aws/requests/sns/subscription_tests.rb b/tests/aws/requests/sns/subscription_tests.rb index 028eccece0..ab26df2f39 100644 --- a/tests/aws/requests/sns/subscription_tests.rb +++ b/tests/aws/requests/sns/subscription_tests.rb @@ -1,10 +1,10 @@ Shindo.tests('AWS::SES | topic lifecycle tests', ['aws', 'sns']) do unless Fog.mocking? - @topic_arn = AWS[:sns].create_topic('fog_subscription_tests').body['TopicArn'] - @queue_url = AWS[:sqs].create_queue('fog_subscription_tests').body['QueueUrl'] - @queue_arn = AWS[:sqs].get_queue_attributes(@queue_url, 'QueueArn').body['Attributes']['QueueArn'] - AWS[:sqs].set_queue_attributes( + @topic_arn = Fog::AWS[:sns].create_topic('fog_subscription_tests').body['TopicArn'] + @queue_url = Fog::AWS[:sqs].create_queue('fog_subscription_tests').body['QueueUrl'] + @queue_arn = Fog::AWS[:sqs].get_queue_attributes(@queue_url, 'QueueArn').body['Attributes']['QueueArn'] + Fog::AWS[:sqs].set_queue_attributes( @queue_url, 'Policy', MultiJson.encode({ @@ -28,7 +28,7 @@ tests("#subscribe('#{@topic_arn}', '#{@queue_arn}', 'sqs')").formats(AWS::SNS::Formats::BASIC.merge('SubscriptionArn' => String)) do pending if Fog.mocking? - body = AWS[:sns].subscribe(@topic_arn, @queue_arn, 'sqs').body + body = Fog::AWS[:sns].subscribe(@topic_arn, @queue_arn, 'sqs').body @subscription_arn = body['SubscriptionArn'] body end @@ -45,31 +45,31 @@ tests("#list_subscriptions").formats(list_subscriptions_format) do pending if Fog.mocking? - AWS[:sns].list_subscriptions.body + Fog::AWS[:sns].list_subscriptions.body end tests("#list_subscriptions_by_topic('#{@topic_arn}')").formats(list_subscriptions_format) do pending if Fog.mocking? - body = AWS[:sns].list_subscriptions_by_topic(@topic_arn).body + body = Fog::AWS[:sns].list_subscriptions_by_topic(@topic_arn).body end tests("#publish('#{@topic_arn}', 'message')").formats(AWS::SNS::Formats::BASIC.merge('MessageId' => String)) do pending if Fog.mocking? - body = AWS[:sns].publish(@topic_arn, 'message').body + body = Fog::AWS[:sns].publish(@topic_arn, 'message').body end tests("#receive_message('#{@queue_url}')...").returns('message') do pending if Fog.mocking? message = nil Fog.wait_for do - message = AWS[:sqs].receive_message(@queue_url).body['Message'].first + message = Fog::AWS[:sqs].receive_message(@queue_url).body['Message'].first end MultiJson.decode(message['Body'])['Message'] end tests("#unsubscribe('#{@subscription_arn}')").formats(AWS::SNS::Formats::BASIC) do pending if Fog.mocking? - AWS[:sns].unsubscribe(@subscription_arn).body + Fog::AWS[:sns].unsubscribe(@subscription_arn).body end end @@ -79,8 +79,8 @@ end unless Fog.mocking? 
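Note: the SNS subscription tests above wire a topic to an SQS queue and publish through it. A sketch of the same flow with live credentials; it omits the queue Policy that set_queue_attributes configures in the test setup, which is still needed for delivery to succeed:

    require 'fog'
    require 'multi_json'

    sns = Fog::AWS[:sns]
    sqs = Fog::AWS[:sqs]

    topic_arn = sns.create_topic('fog_subscription_tests').body['TopicArn']
    queue_url = sqs.create_queue('fog_subscription_tests').body['QueueUrl']
    queue_arn = sqs.get_queue_attributes(queue_url, 'QueueArn').body['Attributes']['QueueArn']

    subscription_arn = sns.subscribe(topic_arn, queue_arn, 'sqs').body['SubscriptionArn']
    sns.publish(topic_arn, 'message')

    message = nil
    Fog.wait_for { message = sqs.receive_message(queue_url).body['Message'].first }
    MultiJson.decode(message['Body'])['Message']   # => 'message'

    sns.unsubscribe(subscription_arn)
    sns.delete_topic(topic_arn)
    sqs.delete_queue(queue_url)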
- AWS[:sns].delete_topic(@topic_arn) - AWS[:sqs].delete_queue(@queue_url) + Fog::AWS[:sns].delete_topic(@topic_arn) + Fog::AWS[:sqs].delete_queue(@queue_url) end end diff --git a/tests/aws/requests/sns/topic_tests.rb b/tests/aws/requests/sns/topic_tests.rb index 6727fcf7b2..e966501116 100644 --- a/tests/aws/requests/sns/topic_tests.rb +++ b/tests/aws/requests/sns/topic_tests.rb @@ -4,19 +4,19 @@ tests("#create_topic('fog_topic_tests')").formats(AWS::SNS::Formats::BASIC.merge('TopicArn' => String)) do pending if Fog.mocking? - body = AWS[:sns].create_topic('fog_topic_tests').body + body = Fog::AWS[:sns].create_topic('fog_topic_tests').body @topic_arn = body["TopicArn"] body end tests("#list_topics").formats(AWS::SNS::Formats::BASIC.merge('Topics' => [String])) do pending if Fog.mocking? - AWS[:sns].list_topics.body + Fog::AWS[:sns].list_topics.body end tests("#set_topic_attributes('#{@topic_arn}', 'DisplayName', 'other-fog_topic_tests')").formats(AWS::SNS::Formats::BASIC) do pending if Fog.mocking? - AWS[:sns].set_topic_attributes(@topic_arn, 'DisplayName', 'other-fog_topic_tests').body + Fog::AWS[:sns].set_topic_attributes(@topic_arn, 'DisplayName', 'other-fog_topic_tests').body end get_topic_attributes_format = AWS::SNS::Formats::BASIC.merge({ @@ -33,12 +33,12 @@ tests("#get_topic_attributes('#{@topic_arn})").formats(get_topic_attributes_format) do pending if Fog.mocking? - AWS[:sns].get_topic_attributes(@topic_arn).body + Fog::AWS[:sns].get_topic_attributes(@topic_arn).body end tests("#delete_topic('#{@topic_arn}')").formats(AWS::SNS::Formats::BASIC) do pending if Fog.mocking? - AWS[:sns].delete_topic(@topic_arn).body + Fog::AWS[:sns].delete_topic(@topic_arn).body end end diff --git a/tests/aws/requests/sqs/message_tests.rb b/tests/aws/requests/sqs/message_tests.rb index 63092f81b2..7584bf2b8e 100644 --- a/tests/aws/requests/sqs/message_tests.rb +++ b/tests/aws/requests/sqs/message_tests.rb @@ -2,9 +2,7 @@ tests('success') do - unless Fog.mocking? - @queue_url = AWS[:sqs].create_queue('fog_message_tests').body['QueueUrl'] - end + @queue_url = Fog::AWS[:sqs].create_queue('fog_message_tests').body['QueueUrl'] send_message_format = AWS::SQS::Formats::BASIC.merge({ 'MessageId' => String, @@ -12,8 +10,7 @@ }) tests("#send_message('#{@queue_url}', 'message')").formats(send_message_format) do - pending if Fog.mocking? - AWS[:sqs].send_message(@queue_url, 'message').body + Fog::AWS[:sqs].send_message(@queue_url, 'message').body end receive_message_format = AWS::SQS::Formats::BASIC.merge({ @@ -32,24 +29,21 @@ }) tests("#receive_message").formats(receive_message_format) do - pending if Fog.mocking? - data = AWS[:sqs].receive_message(@queue_url).body + data = Fog::AWS[:sqs].receive_message(@queue_url).body @receipt_handle = data['Message'].first['ReceiptHandle'] data end tests("#change_message_visibility('#{@queue_url}, '#{@receipt_handle}', 60)").formats(AWS::SQS::Formats::BASIC) do - pending if Fog.mocking? - AWS[:sqs].change_message_visibility(@queue_url, @receipt_handle, 60).body + Fog::AWS[:sqs].change_message_visibility(@queue_url, @receipt_handle, 60).body end tests("#delete_message('#{@queue_url}', '#{@receipt_handle}')").formats(AWS::SQS::Formats::BASIC) do - pending if Fog.mocking? - AWS[:sqs].delete_message(@queue_url, @receipt_handle).body + Fog::AWS[:sqs].delete_message(@queue_url, @receipt_handle).body end unless Fog.mocking? 
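Note: the `pending if Fog.mocking?` guards dropped from the SQS tests above reflect the new SQS mocks noted in the changelog, so the same calls now run under Fog.mock!. A sketch, assuming AWS credentials are set (fake values suffice when mocking, as the mock_helper changes later in this patch do):

    require 'fog'
    Fog.mock!

    sqs = Fog::AWS[:sqs]
    queue_url = sqs.create_queue('fog_message_tests').body['QueueUrl']

    sqs.send_message(queue_url, 'message')
    receipt = sqs.receive_message(queue_url).body['Message'].first['ReceiptHandle']
    sqs.change_message_visibility(queue_url, receipt, 60)
    sqs.delete_message(queue_url, receipt)
    sqs.delete_queue(queue_url)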
- AWS[:sqs].delete_queue(@queue_url) + Fog::AWS[:sqs].delete_queue(@queue_url) end end diff --git a/tests/aws/requests/sqs/queue_tests.rb b/tests/aws/requests/sqs/queue_tests.rb index 6ba418e0d1..d06daa49f6 100644 --- a/tests/aws/requests/sqs/queue_tests.rb +++ b/tests/aws/requests/sqs/queue_tests.rb @@ -7,8 +7,7 @@ }) tests("#create_queue('fog_queue_tests')").formats(create_queue_format) do - pending if Fog.mocking? - data = AWS[:sqs].create_queue('fog_queue_tests').body + data = Fog::AWS[:sqs].create_queue('fog_queue_tests').body @queue_url = data['QueueUrl'] data end @@ -18,13 +17,11 @@ }) tests("#list_queues").formats(list_queues_format) do - pending if Fog.mocking? - AWS[:sqs].list_queues.body + Fog::AWS[:sqs].list_queues.body end tests("#set_queue_attributes('#{@queue_url}', 'VisibilityTimeout', 60)").formats(AWS::SQS::Formats::BASIC) do - pending if Fog.mocking? - AWS[:sqs].set_queue_attributes(@queue_url, 'VisibilityTimeout', 60).body + Fog::AWS[:sqs].set_queue_attributes(@queue_url, 'VisibilityTimeout', 60).body end get_queue_attributes_format = AWS::SQS::Formats::BASIC.merge({ @@ -41,13 +38,11 @@ }) tests("#get_queue_attributes('#{@queue_url}', 'All')").formats(get_queue_attributes_format) do - pending if Fog.mocking? - AWS[:sqs].get_queue_attributes(@queue_url, 'All').body + Fog::AWS[:sqs].get_queue_attributes(@queue_url, 'All').body end tests("#delete_queue('#{@queue_url}')").formats(AWS::SQS::Formats::BASIC) do - pending if Fog.mocking? - AWS[:sqs].delete_queue(@queue_url).body + Fog::AWS[:sqs].delete_queue(@queue_url).body end end diff --git a/tests/aws/requests/storage/bucket_tests.rb b/tests/aws/requests/storage/bucket_tests.rb index ce12067ced..c0ad578f36 100644 --- a/tests/aws/requests/storage/bucket_tests.rb +++ b/tests/aws/requests/storage/bucket_tests.rb @@ -1,4 +1,5 @@ Shindo.tests('Fog::Storage[:aws] | bucket requests', [:aws]) do + @aws_bucket_name = 'fogbuckettests-' + Time.now.to_i.to_s(32) tests('success') do @@ -33,30 +34,31 @@ } } - tests("#put_bucket('fogbuckettests')").succeeds do - Fog::Storage[:aws].put_bucket('fogbuckettests') + tests("#put_bucket('#{@aws_bucket_name}')").succeeds do + Fog::Storage[:aws].put_bucket(@aws_bucket_name) + @aws_owner = Fog::Storage[:aws].get_bucket_acl(Fog::Storage[:aws].directories.first.key).body['Owner'] end tests("#get_service").formats(@service_format) do Fog::Storage[:aws].get_service.body end - file = Fog::Storage[:aws].directories.get('fogbuckettests').files.create(:body => 'y', :key => 'x') + file = Fog::Storage[:aws].directories.get(@aws_bucket_name).files.create(:body => 'y', :key => 'x') - tests("#get_bucket('fogbuckettests)").formats(@bucket_format) do - Fog::Storage[:aws].get_bucket('fogbuckettests').body + tests("#get_bucket('#{@aws_bucket_name}')").formats(@bucket_format) do + Fog::Storage[:aws].get_bucket(@aws_bucket_name).body end file.destroy - file1 = Fog::Storage[:aws].directories.get('fogbuckettests').files.create(:body => 'a', :key => 'a/a1/file1') - file2 = Fog::Storage[:aws].directories.get('fogbuckettests').files.create(:body => 'ab', :key => 'a/file2') - file3 = Fog::Storage[:aws].directories.get('fogbuckettests').files.create(:body => 'abc', :key => 'b/file3') - file4 = Fog::Storage[:aws].directories.get('fogbuckettests').files.create(:body => 'abcd', :key => 'file4') + file1 = Fog::Storage[:aws].directories.get(@aws_bucket_name).files.create(:body => 'a', :key => 'a/a1/file1') + file2 = Fog::Storage[:aws].directories.get(@aws_bucket_name).files.create(:body => 'ab', :key => 'a/file2') + file3 = 
Fog::Storage[:aws].directories.get(@aws_bucket_name).files.create(:body => 'abc', :key => 'b/file3') + file4 = Fog::Storage[:aws].directories.get(@aws_bucket_name).files.create(:body => 'abcd', :key => 'file4') - tests("#get_bucket('fogbuckettests')") do + tests("#get_bucket('#{@aws_bucket_name}')") do before do - @bucket = Fog::Storage[:aws].get_bucket('fogbuckettests') + @bucket = Fog::Storage[:aws].get_bucket(@aws_bucket_name) end tests(".body['Contents'].map{|n| n['Key']}").returns(["a/a1/file1", "a/file2", "b/file3", "file4"]) do @@ -72,9 +74,9 @@ end end - tests("#get_bucket('fogbuckettests', 'delimiter' => '/')") do + tests("#get_bucket('#{@aws_bucket_name}', 'delimiter' => '/')") do before do - @bucket = Fog::Storage[:aws].get_bucket('fogbuckettests', 'delimiter' => '/') + @bucket = Fog::Storage[:aws].get_bucket(@aws_bucket_name, 'delimiter' => '/') end tests(".body['Contents'].map{|n| n['Key']}").returns(['file4']) do @@ -86,9 +88,9 @@ end end - tests("#get_bucket('fogbuckettests', 'delimiter' => '/', 'prefix' => 'a/')") do + tests("#get_bucket('#{@aws_bucket_name}', 'delimiter' => '/', 'prefix' => 'a/')") do before do - @bucket = Fog::Storage[:aws].get_bucket('fogbuckettests', 'delimiter' => '/', 'prefix' => 'a/') + @bucket = Fog::Storage[:aws].get_bucket(@aws_bucket_name, 'delimiter' => '/', 'prefix' => 'a/') end tests(".body['Contents'].map{|n| n['Key']}").returns(['a/file2']) do @@ -102,63 +104,87 @@ file1.destroy; file2.destroy; file3.destroy; file4.destroy - tests("#get_bucket_location('fogbuckettests)").formats('LocationConstraint' => NilClass) do - Fog::Storage[:aws].get_bucket_location('fogbuckettests').body + tests("#get_bucket_location('#{@aws_bucket_name}')").formats('LocationConstraint' => NilClass) do + Fog::Storage[:aws].get_bucket_location(@aws_bucket_name).body end - tests("#get_request_payment('fogbuckettests')").formats('Payer' => String) do - Fog::Storage[:aws].get_request_payment('fogbuckettests').body + tests("#get_request_payment('#{@aws_bucket_name}')").formats('Payer' => String) do + Fog::Storage[:aws].get_request_payment(@aws_bucket_name).body end - tests("#put_request_payment('fogbuckettests', 'Requester')").succeeds do - Fog::Storage[:aws].put_request_payment('fogbuckettests', 'Requester') + tests("#put_request_payment('#{@aws_bucket_name}', 'Requester')").succeeds do + Fog::Storage[:aws].put_request_payment(@aws_bucket_name, 'Requester') end - tests("#put_bucket_website('fogbuckettests', 'index.html')").succeeds do + tests("#put_bucket_website('#{@aws_bucket_name}', 'index.html')").succeeds do pending if Fog.mocking? - Fog::Storage[:aws].put_bucket_website('fogbuckettests', 'index.html') + Fog::Storage[:aws].put_bucket_website(@aws_bucket_name, 'index.html') end - tests("#delete_bucket_website('fogbuckettests')").succeeds do - pending if Fog.mocking? - Fog::Storage[:aws].delete_bucket_website('fogbuckettests') + tests("#put_bucket_acl('#{@aws_bucket_name}', 'private')").succeeds do + Fog::Storage[:aws].put_bucket_acl(@aws_bucket_name, 'private') end - tests("#delete_bucket('fogbuckettests')").succeeds do - Fog::Storage[:aws].delete_bucket('fogbuckettests') - end + acl = { + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => @aws_owner, + 'Permission' => "FULL_CONTROL" + } + ] + } + tests("#put_bucket_acl('#{@aws_bucket_name}', hash with id)").returns(acl) do + pending if Fog.mocking? 
- tests("#put_bucket_acl('fogbuckettests', 'private')").succeeds do - Fog::Storage[:aws].put_bucket_acl('fogbuckettests', 'private') + Fog::Storage[:aws].put_bucket_acl(@aws_bucket_name, acl) + Fog::Storage[:aws].get_bucket_acl(@aws_bucket_name).body end - tests("#put_bucket_acl('fogbuckettests', hash)").returns(true) do - Fog::Storage[:aws].put_bucket_acl('fogbuckettests', { - 'Owner' => { 'ID' => "8a6925ce4adf5f21c32aa379004fef", 'DisplayName' => "mtd@amazon.com" }, + tests("#put_bucket_acl('#{@aws_bucket_name}', hash with email)").returns({ + 'Owner' => @aws_owner, 'AccessControlList' => [ - { - 'Grantee' => { 'ID' => "8a6925ce4adf588a4532142d3f74dd8c71fa124b1ddee97f21c32aa379004fef", 'DisplayName' => "mtd@amazon.com" }, - 'Permission' => "FULL_CONTROL" + { + 'Grantee' => { 'ID' => 'f62f0218873cfa5d56ae9429ae75a592fec4fd22a5f24a20b1038a7db9a8f150', 'DisplayName' => 'mtd' }, + 'Permission' => "FULL_CONTROL" + } + ] + }) do + pending if Fog.mocking? + Fog::Storage[:aws].put_bucket_acl(@aws_bucket_name, { + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => { 'EmailAddress' => 'mtd@amazon.com' }, + 'Permission' => "FULL_CONTROL" } ] }) - Fog::Storage[:aws].get_bucket_acl('fogbuckettests').body == <<-BODY - - - 8a6925ce4adf5f21c32aa379004fef - mtd@amazon.com - - - - - 8a6925ce4adf588a4532142d3f74dd8c71fa124b1ddee97f21c32aa379004fef - mtd@amazon.com - - FULL_CONTROL - - - -BODY + Fog::Storage[:aws].get_bucket_acl(@aws_bucket_name).body + end + + acl = { + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => { 'URI' => 'http://acs.amazonaws.com/groups/global/AllUsers' }, + 'Permission' => "FULL_CONTROL" + } + ] + } + tests("#put_bucket_acl('#{@aws_bucket_name}', hash with uri)").returns(acl) do + pending if Fog.mocking? + Fog::Storage[:aws].put_bucket_acl(@aws_bucket_name, acl) + Fog::Storage[:aws].get_bucket_acl(@aws_bucket_name).body + end + + tests("#delete_bucket_website('#{@aws_bucket_name}')").succeeds do + pending if Fog.mocking? + Fog::Storage[:aws].delete_bucket_website(@aws_bucket_name) + end + + tests("#delete_bucket('#{@aws_bucket_name}')").succeeds do + Fog::Storage[:aws].delete_bucket(@aws_bucket_name) end end @@ -201,4 +227,6 @@ end + # don't keep the bucket around + Fog::Storage[:aws].delete_bucket(@aws_bucket_name) rescue nil end diff --git a/tests/aws/requests/storage/object_tests.rb b/tests/aws/requests/storage/object_tests.rb index 04bf487224..50ebf2405a 100644 --- a/tests/aws/requests/storage/object_tests.rb +++ b/tests/aws/requests/storage/object_tests.rb @@ -1,6 +1,6 @@ Shindo.tests('AWS::Storage | object requests', ['aws']) do - - @directory = Fog::Storage[:aws].directories.create(:key => 'fogobjecttests') + @directory = Fog::Storage[:aws].directories.create(:key => 'fogobjecttests-' + Time.now.to_i.to_s(32)) + @aws_owner = Fog::Storage[:aws].get_bucket_acl(@directory.key).body['Owner'] tests('success') do @@ -34,32 +34,52 @@ Fog::Storage[:aws].put_object_acl(@directory.identity, 'fog_object', 'private') end - tests("#put_object_acl('#{@directory.identity}', 'fog_object', hash)").returns(true) do + acl = { + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => @aws_owner, + 'Permission' => "FULL_CONTROL" + } + ]} + tests("#put_object_acl('#{@directory.identity}', 'fog_object', hash with id)").returns(acl) do + pending if Fog.mocking? 
+ Fog::Storage[:aws].put_object_acl(@directory.identity, 'fog_object', acl) + Fog::Storage[:aws].get_object_acl(@directory.identity, 'fog_object').body + end + + tests("#put_object_acl('#{@directory.identity}', 'fog_object', hash with email)").returns({ + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => { 'ID' => 'f62f0218873cfa5d56ae9429ae75a592fec4fd22a5f24a20b1038a7db9a8f150', 'DisplayName' => 'mtd' }, + 'Permission' => "FULL_CONTROL" + } + ]}) do + pending if Fog.mocking? Fog::Storage[:aws].put_object_acl(@directory.identity, 'fog_object', { - 'Owner' => { 'ID' => "8a6925ce4adf5f21c32aa379004fef", 'DisplayName' => "mtd@amazon.com" }, + 'Owner' => @aws_owner, 'AccessControlList' => [ - { - 'Grantee' => { 'ID' => "8a6925ce4adf588a4532142d3f74dd8c71fa124b1ddee97f21c32aa379004fef", 'DisplayName' => "mtd@amazon.com" }, - 'Permission' => "FULL_CONTROL" + { + 'Grantee' => { 'EmailAddress' => 'mtd@amazon.com' }, + 'Permission' => "FULL_CONTROL" } ]}) - Fog::Storage[:aws].get_object_acl(@directory.identity, 'fog_object').body == <<-BODY - - - 8a6925ce4adf5f21c32aa379004fef - mtd@amazon.com - - - - - 8a6925ce4adf588a4532142d3f74dd8c71fa124b1ddee97f21c32aa379004fef - mtd@amazon.com - - FULL_CONTROL - - - -BODY + Fog::Storage[:aws].get_object_acl(@directory.identity, 'fog_object').body + end + + acl = { + 'Owner' => @aws_owner, + 'AccessControlList' => [ + { + 'Grantee' => { 'URI' => 'http://acs.amazonaws.com/groups/global/AllUsers' }, + 'Permission' => "FULL_CONTROL" + } + ]} + tests("#put_object_acl('#{@directory.identity}', 'fog_object', hash with uri)").returns(acl) do + pending if Fog.mocking? + Fog::Storage[:aws].put_object_acl(@directory.identity, 'fog_object', acl) + Fog::Storage[:aws].get_object_acl(@directory.identity, 'fog_object').body end tests("#delete_object('#{@directory.identity}', 'fog_object')").succeeds do diff --git a/tests/brightbox/requests/compute/helper.rb b/tests/brightbox/requests/compute/helper.rb index 1625e00483..6271193abf 100644 --- a/tests/brightbox/requests/compute/helper.rb +++ b/tests/brightbox/requests/compute/helper.rb @@ -58,13 +58,7 @@ module Nested "resource_type" => String, "url" => String, "id" => String, - "status" => String, - "ram_limit" => Integer, - "ram_used" => Integer, - "cloud_ips_limit" => Integer, - "cloud_ips_used" => Integer, - "load_balancers_limit" => Integer, - "load_balancers_used" => Integer + "status" => String } API_CLIENT = { @@ -85,6 +79,27 @@ module Nested "reverse_dns" => String } + FIREWALL_POLICY = { + "id" => String, + "resource_type" => String, + "url" => String, + "name" => String, + "default" => Fog::Boolean + } + + FIREWALL_RULE = { + "id" => String, + "resource_type" => String, + "url" => String, + "source" => Fog::Nullable::String, + "source_port" => Fog::Nullable::String, + "destination" => Fog::Nullable::String, + "destination_port" => Fog::Nullable::String, + "protocol" => String, + "icmp_type_name" => Fog::Nullable::String, + "description" => Fog::Nullable::String + } + IMAGE = { "name" => String, "created_at" => String, @@ -180,6 +195,31 @@ module Collected "server" => Fog::Brightbox::Nullable::Server } + FIREWALL_POLICY = { + "id" => String, + "resource_type" => String, + "url" => String, + "name" => String, + "description" => Fog::Nullable::String, + "default" => Fog::Boolean, + "server_group" => Brightbox::Compute::Formats::Nested::SERVER_GROUP, + "rules" => [Brightbox::Compute::Formats::Nested::FIREWALL_RULE] + } + + FIREWALL_RULE = { + "id" => String, + "resource_type" => String, + "url" => 
String, + "source" => String, + "source_port" => String, + "destination" => String, + "destination_port" => String, + "protocol" => String, + "icmp_type_name" => String, + "description" => Fog::Nullable::String, + "firewall_policy" => Brightbox::Compute::Formats::Nested::FIREWALL_POLICY + } + IMAGE = { "name" => String, "created_at" => String, @@ -339,6 +379,30 @@ module Full "server" => Fog::Brightbox::Nullable::Server } + FIREWALL_POLICY = { + "id" => String, + "resource_type" => String, + "url" => String, + "name" => String, + "description" => Fog::Nullable::String, + "default" => Fog::Boolean, + "server_group" => Brightbox::Compute::Formats::Nested::SERVER_GROUP, + "rules" => [Brightbox::Compute::Formats::Nested::FIREWALL_RULE] + } + + FIREWALL_RULE = { + "id" => String, + "resource_type" => String, + "url" => String, + "source" => String, + "source_port" => String, + "destination" => String, + "destination_port" => String, + "protocol" => String, + "icmp_type_name" => String, + "description" => Fog::Nullable::String + } + IMAGE = { "name" => String, "created_at" => String, @@ -458,6 +522,8 @@ module Collection API_CLIENTS = [Brightbox::Compute::Formats::Collected::API_CLIENT] CLOUD_IPS = [Brightbox::Compute::Formats::Collected::CLOUD_IP] IMAGES = [Brightbox::Compute::Formats::Collected::IMAGE] + FIREWALL_POLICIES = [Brightbox::Compute::Formats::Collected::FIREWALL_POLICY] + FIREWALL_RULES = [Brightbox::Compute::Formats::Collected::FIREWALL_RULE] LOAD_BALANCERS = [Brightbox::Compute::Formats::Collected::LOAD_BALANCER] SERVERS = [Brightbox::Compute::Formats::Collected::SERVER] SERVER_GROUPS = [Brightbox::Compute::Formats::Collected::SERVER_GROUP] diff --git a/tests/brightbox/requests/compute/load_balancer_tests.rb b/tests/brightbox/requests/compute/load_balancer_tests.rb index 5c9f1997a5..84b0668c62 100644 --- a/tests/brightbox/requests/compute/load_balancer_tests.rb +++ b/tests/brightbox/requests/compute/load_balancer_tests.rb @@ -34,6 +34,7 @@ end tests("#list_load_balancers()").formats(Brightbox::Compute::Formats::Collection::LOAD_BALANCERS) do + pending if Fog.mocking? 
Fog::Compute[:brightbox].list_load_balancers end diff --git a/tests/compute/models/flavors_tests.rb b/tests/compute/models/flavors_tests.rb index 48fcea7cc9..da110e056c 100644 --- a/tests/compute/models/flavors_tests.rb +++ b/tests/compute/models/flavors_tests.rb @@ -1,6 +1,6 @@ for provider, config in compute_providers - next if [:voxel].include?(provider) + next if [:glesys, :voxel].include?(provider) Shindo.tests("Fog::Compute[:#{provider}] | flavors", [provider]) do diff --git a/tests/core/attribute_tests.rb b/tests/core/attribute_tests.rb index 2b26c72a9b..63fd5e57a6 100644 --- a/tests/core/attribute_tests.rb +++ b/tests/core/attribute_tests.rb @@ -1,6 +1,7 @@ class FogAttributeTestModel < Fog::Model attribute :key, :aliases => 'keys', :squash => "id" attribute :time, :type => :time + attribute :bool, :type => :boolean end Shindo.tests('Fog::Attributes', 'core') do @@ -51,4 +52,32 @@ class FogAttributeTestModel < Fog::Model end + tests(':type => :boolean') do + tests(':bool => "true"').returns(true) do + @model.merge_attributes(:bool => 'true') + @model.bool + end + + tests(':bool => true').returns(true) do + @model.merge_attributes(:bool => true) + @model.bool + end + + tests(':bool => "false"').returns(false) do + @model.merge_attributes(:bool => 'false') + @model.bool + end + + tests(':bool => false').returns(false) do + @model.merge_attributes(:bool => false) + @model.bool + end + + tests(':bool => "foo"').returns(nil) do + @model.merge_attributes(:bool => "foo") + @model.bool + end + + end + end diff --git a/tests/core/credential_tests.rb b/tests/core/credential_tests.rb index 208f240197..cabbf7d270 100644 --- a/tests/core/credential_tests.rb +++ b/tests/core/credential_tests.rb @@ -3,6 +3,7 @@ @old_home = ENV['HOME'] @old_rc = ENV['FOG_RC'] @old_credential = ENV['FOG_CREDENTIAL'] + @old_credentials = Fog.credentials Fog.instance_variable_set('@credential_path', nil) # kill memoization Fog.instance_variable_set('@credential', nil) # kill memoization end @@ -11,6 +12,7 @@ ENV['HOME'] = @old_home ENV['FOG_RC'] = @old_rc ENV['FOG_CREDENTIAL'] = @old_credential + Fog.credentials = @old_credentials end tests('credential') do diff --git a/tests/helper.rb b/tests/helper.rb index 1a064dce4d..aee2d568a8 100644 --- a/tests/helper.rb +++ b/tests/helper.rb @@ -1,5 +1,5 @@ require 'fog' -require 'fog/bin' +require 'fog/bin' # for available_providers require File.expand_path(File.join(File.dirname(__FILE__), 'helpers', 'mock_helper')) @@ -7,8 +7,12 @@ def lorem_file File.open(File.dirname(__FILE__) + '/lorem.txt', 'r') end +def array_differences(array_a, array_b) + (array_a - array_b) | (array_b - array_a) +end + # check to see which credentials are available and add others to the skipped tags list -all_providers = ['aws', 'bluebox', 'brightbox', 'dnsimple', 'dnsmadeeasy', 'dynect', 'ecloud', 'glesys', 'gogrid', 'google', 'linode', 'local', 'ninefold', 'newservers', 'rackspace', 'slicehost', 'stormondemand', 'voxel', 'zerigo'] +all_providers = ['aws', 'bluebox', 'brightbox', 'dnsimple', 'dnsmadeeasy', 'dynect', 'ecloud', 'glesys', 'gogrid', 'google', 'linode', 'local', 'ninefold', 'newservers', 'openstack', 'rackspace', 'slicehost', 'stormondemand', 'voxel', 'zerigo'] available_providers = Fog.available_providers.map {|provider| provider.downcase} for provider in (all_providers - available_providers) Formatador.display_line("[yellow]Skipping tests for [bold]#{provider}[/] [yellow]due to lacking credentials (add some to '~/.fog' to run them)[/]") diff --git a/tests/helpers/collection_helper.rb 
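Note: the attribute tests above cover the new :boolean attribute type for Fog::Model; 'true'/'false' strings and real booleans are coerced, anything else becomes nil. A small sketch with a hypothetical model class:

    require 'fog'

    class ExampleModel < Fog::Model
      attribute :flag, :type => :boolean
    end

    model = ExampleModel.new
    model.merge_attributes(:flag => 'true')
    model.flag   # => true; 'false' maps to false, unrecognised values to nil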
b/tests/helpers/collection_helper.rb index 7dd1a790e4..c37493fec5 100644 --- a/tests/helpers/collection_helper.rb +++ b/tests/helpers/collection_helper.rb @@ -12,6 +12,11 @@ def collection_tests(collection, params = {}, mocks_implemented = true) @instance = collection.create(params) end + # FIXME: work around for timing issue on AWS describe_instances mocks + if Fog.mocking? && @instance.respond_to?(:ready?) + @instance.wait_for { ready? } + end + tests("#all").succeeds do pending if Fog.mocking? && !mocks_implemented collection.all diff --git a/tests/helpers/compute/server_helper.rb b/tests/helpers/compute/server_helper.rb index 1dc7b6f9ca..a8cd1df458 100644 --- a/tests/helpers/compute/server_helper.rb +++ b/tests/helpers/compute/server_helper.rb @@ -4,6 +4,7 @@ def server_tests(connection, params = {}, mocks_implemented = true) tests('#reload').returns(true) do pending if Fog.mocking? && !mocks_implemented + @instance.wait_for { ready? } identity = @instance.identity !identity.nil? && identity == @instance.reload.identity end diff --git a/tests/helpers/formats_helper.rb b/tests/helpers/formats_helper.rb index 3409f660be..bf9165b876 100644 --- a/tests/helpers/formats_helper.rb +++ b/tests/helpers/formats_helper.rb @@ -9,6 +9,8 @@ module Integer; end module String; end module Time; end module Float; end + module Hash; end + module Array; end end end [FalseClass, TrueClass].each {|klass| klass.send(:include, Fog::Boolean)} @@ -17,21 +19,23 @@ module Float; end [NilClass, Time].each {|klass| klass.send(:include, Fog::Nullable::Time)} [Integer, NilClass].each {|klass| klass.send(:include, Fog::Nullable::Integer)} [Float, NilClass].each {|klass| klass.send(:include, Fog::Nullable::Float)} +[Hash, NilClass].each {|klass| klass.send(:include, Fog::Nullable::Hash)} +[Array, NilClass].each {|klass| klass.send(:include, Fog::Nullable::Array)} module Shindo class Tests - def formats(format) + def formats(format, strict=true) raise ArgumentError, 'format is nil' unless format test('has proper format') do - formats_kernel(instance_eval(&Proc.new), format) + formats_kernel(instance_eval(&Proc.new), format, true, strict) end end private - def formats_kernel(original_data, original_format, original = true) + def formats_kernel(original_data, original_format, original = true, strict = true) valid = true data = original_data.dup format = original_format.dup @@ -44,20 +48,20 @@ def formats_kernel(original_data, original_format, original = true) format.delete(key) case value when Array - valid &&= datum.is_a?(Array) || p("not Array: #{datum.inspect}") + valid &&= datum.is_a?(Array) || p("#{key.inspect} not Array: #{datum.inspect}") if datum.is_a?(Array) && !value.empty? for element in datum type = value.first if type.is_a?(Hash) - valid &&= formats_kernel({:element => element}, {:element => type}, false) + valid &&= formats_kernel({:element => element}, {:element => type}, false, strict) else valid &&= element.is_a?(type) end end end when Hash - valid &&= datum.is_a?(Hash) || p("not Hash: #{datum.inspect}") - valid &&= formats_kernel(datum, value, false) + valid &&= datum.is_a?(Hash) || p("#{key.inspect} not Hash: #{datum.inspect}") + valid &&= formats_kernel(datum, value, false, strict) else p "#{key.inspect} not #{value.inspect}: #{datum.inspect}" unless datum.is_a?(value) valid &&= datum.is_a?(value) @@ -65,7 +69,11 @@ def formats_kernel(original_data, original_format, original = true) end p data unless data.empty? p format unless format.empty? - valid &&= data.empty? && format.empty? 
+ if strict + valid &&= data.empty? && format.empty? + else + valid &&= format.empty? + end if !valid && original @message = "#{original_data.inspect} does not match #{original_format.inspect}" end diff --git a/tests/helpers/formats_helper_tests.rb b/tests/helpers/formats_helper_tests.rb index 64ff8cc939..0b3c15aeec 100644 --- a/tests/helpers/formats_helper_tests.rb +++ b/tests/helpers/formats_helper_tests.rb @@ -20,6 +20,10 @@ formats_kernel([{:a => :b}], [{:a => Symbol}]) end + test('non strict extra data') do + formats_kernel({:a => :b, :b => :c}, {:a => Symbol}, true, false) + end + end tests('returns false') do @@ -36,6 +40,10 @@ !formats_kernel({}, {:a => String}) end + test('non strict extra data') do + !formats_kernel({:a => :b, :b => :c}, {:z => Symbol}, true, false) + end + end end diff --git a/tests/helpers/mock_helper.rb b/tests/helpers/mock_helper.rb index 260fe47c17..a5a34d3403 100644 --- a/tests/helpers/mock_helper.rb +++ b/tests/helpers/mock_helper.rb @@ -8,7 +8,7 @@ # if in mocked mode, fill in some fake credentials for us if Fog.mock? - Fog.instance_variable_set(:@credentials, { + Fog.credentials = { :aws_access_key_id => 'aws_access_key_id', :aws_secret_access_key => 'aws_secret_access_key', :bluebox_api_key => 'bluebox_api_key', @@ -38,6 +38,10 @@ :ninefold_storage_token => 'ninefold_storage_token', # :public_key_path => '~/.ssh/id_rsa.pub', # :private_key_path => '~/.ssh/id_rsa', + :openstack_api_key => 'openstack_api_key', + :openstack_username => 'openstack_username', + :openstack_tenant => 'openstack_tenant', + :openstack_auth_url => 'openstack_auth_url', :rackspace_api_key => 'rackspace_api_key', :rackspace_username => 'rackspace_username', :slicehost_password => 'slicehost_password', @@ -57,5 +61,5 @@ :vsphere_username => 'apiuser', :vsphere_password => 'apipassword', :vsphere_expected_pubkey_hash => 'abcdef1234567890' - }) + } end diff --git a/tests/linode/requests/compute/linodeplans_tests.rb b/tests/linode/requests/compute/linodeplans_tests.rb index 29126f7eb0..f656b36590 100644 --- a/tests/linode/requests/compute/linodeplans_tests.rb +++ b/tests/linode/requests/compute/linodeplans_tests.rb @@ -7,7 +7,8 @@ '3' => Integer, '4' => Integer, '6' => Integer, - '7' => Integer + '7' => Integer, + '8' => Integer }, 'DISK' => Integer, 'PLANID' => Integer, diff --git a/tests/ninefold/requests/compute/helper.rb b/tests/ninefold/requests/compute/helper.rb index aa99dc6b7f..488dc047cc 100644 --- a/tests/ninefold/requests/compute/helper.rb +++ b/tests/ninefold/requests/compute/helper.rb @@ -37,9 +37,13 @@ module Lists "storagetype" => String, "offerha" => Fog::Boolean, "domainid" => Integer, - "domain" => String + "domain" => String, + "issystem" => Fog::Boolean, + "limitcpuuse" => Fog::Boolean, + "defaultuse" => Fog::Boolean + } - #SERVICE_OFFERINGS = [Ninefold::Compute::Formats::Lists::SERVICE_OFFERING] + SERVICE_OFFERINGS = [Ninefold::Compute::Formats::Lists::SERVICE_OFFERING] ACCOUNTS = [{ "id"=>Integer, "name"=>String, @@ -95,19 +99,6 @@ module Lists "state"=>String, "parentid"=>Integer }] - SERVICE_OFFERINGS = [{ - "id"=>Integer, - "name"=>String, - "displaytext"=>String, - "cpunumber"=>Integer, - "cpuspeed"=>Integer, - "memory"=>Integer, - "created"=>String, - "storagetype"=>String, - "offerha"=>Fog::Boolean, - "domainid"=>Integer, - "domain"=>String - }] DISK_OFFERINGS = [{ "id"=>Integer, "domainid"=>Integer, @@ -129,6 +120,7 @@ module Lists }] ZONES = [{ "allocationstate"=>String, + "dhcpprovider"=>String, "id"=>Integer, "name"=>String, "networktype"=>String, @@ 
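Note: the formats helper above gains a `strict` flag; with strict left at its default the data must match the format exactly, while strict = false tolerates extra keys in the response (the new OpenStack request tests below rely on this). A hypothetical Shindo example:

    Shindo.tests('example | formats with strict flag', ['example']) do
      format = { 'id' => String, 'name' => String }

      # strict (default): extra keys would fail the test
      tests('strict').formats(format) do
        { 'id' => '1', 'name' => 'fog' }
      end

      # strict = false: extra keys such as 'links' are tolerated
      tests('non-strict').formats(format, false) do
        { 'id' => '1', 'name' => 'fog', 'links' => [] }
      end
    end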
-266,8 +258,13 @@ module Networks "domain"=>String, "isdefault"=>Fog::Boolean, "service"=>Array, - "networkdomain"=>String, - "securitygroupenabled"=>Fog::Boolean + "networkdomain"=>Fog::Nullable::String, + "securitygroupenabled"=>Fog::Boolean, + "netmask"=>Fog::Nullable::String, + "startip"=>Fog::Nullable::String, + "endip"=>Fog::Nullable::String, + "gateway"=>Fog::Nullable::String, + "vlan"=>Fog::Nullable::String }] end module Addresses diff --git a/tests/openstack/requests/compute/flavor_tests.rb b/tests/openstack/requests/compute/flavor_tests.rb new file mode 100644 index 0000000000..f67ca54e4c --- /dev/null +++ b/tests/openstack/requests/compute/flavor_tests.rb @@ -0,0 +1,35 @@ +Shindo.tests('Fog::Compute[:openstack] | flavor requests', ['openstack']) do + + @flavor_format = { + 'id' => String, + 'name' => String, + 'disk' => Integer, + 'ram' => Integer, + 'links' => Array + } + + tests('success') do + + tests('#get_flavor_details(1)').formats(@flavor_format, false) do + Fog::Compute[:openstack].get_flavor_details("1").body['flavor'] + end + + tests('#list_flavors').formats({'flavors' => [OpenStack::Compute::Formats::SUMMARY]}) do + Fog::Compute[:openstack].list_flavors.body + end + + tests('#list_flavors_detail').formats({'flavors' => [@flavor_format]}, false) do + Fog::Compute[:openstack].list_flavors_detail.body + end + + end + + tests('failure') do + + tests('#get_flavor_details(0)').raises(Fog::Compute::OpenStack::NotFound) do + Fog::Compute[:openstack].get_flavor_details("0") + end + + end + +end diff --git a/tests/openstack/requests/compute/helper.rb b/tests/openstack/requests/compute/helper.rb new file mode 100644 index 0000000000..8d8e166c28 --- /dev/null +++ b/tests/openstack/requests/compute/helper.rb @@ -0,0 +1,17 @@ +class OpenStack + + module Compute + + module Formats + + SUMMARY = { + 'id' => String, + 'name' => String, + 'links' => Array + } + + end + + end + +end diff --git a/tests/openstack/requests/compute/image_tests.rb b/tests/openstack/requests/compute/image_tests.rb new file mode 100644 index 0000000000..12c62e3102 --- /dev/null +++ b/tests/openstack/requests/compute/image_tests.rb @@ -0,0 +1,59 @@ +require 'fog/openstack' + +Shindo.tests('Fog::Compute[:openstack] | image requests', ['openstack']) do + + @image_format = { + 'created' => Fog::Nullable::String, + 'id' => String, + 'name' => String, + 'progress' => Fog::Nullable::Integer, + 'status' => String, + 'updated' => String, + 'minRam' => Integer, + 'minDisk' => Integer, + 'server' => Fog::Nullable::Hash, + 'metadata' => Hash, + 'links' => Array + } + + tests('success') do + + @image_id = Fog::Compute[:openstack].images[0].id + + unless Fog.mocking? + Fog::Compute[:openstack].images.get(@image_id).wait_for { ready? } + end + tests("#get_image_details(#{@image_id})").formats(@image_format) do + pending if Fog.mocking? + Fog::Compute[:openstack].get_image_details(@image_id).body['image'] + end + + tests('#list_images').formats({'images' => [OpenStack::Compute::Formats::SUMMARY]}) do + Fog::Compute[:openstack].list_images.body + end + + tests('#list_images_detail').formats({'images' => [@image_format]}) do + Fog::Compute[:openstack].list_images_detail.body + end + + unless Fog.mocking? + Fog::Compute[:openstack].images.get(@image_id).wait_for { ready? } + end + + end + + tests('failure') do + + tests('#delete_image(0)').raises(Fog::Compute::OpenStack::NotFound) do + pending if Fog.mocking? 
+ Fog::Compute[:openstack].delete_image(0) + end + + tests('#get_image_details(0)').raises(Fog::Compute::OpenStack::NotFound) do + pending if Fog.mocking? + Fog::Compute[:openstack].get_image_details(0) + end + + end + +end diff --git a/tests/openstack/requests/compute/server_tests.rb b/tests/openstack/requests/compute/server_tests.rb new file mode 100644 index 0000000000..a4ff1dab4f --- /dev/null +++ b/tests/openstack/requests/compute/server_tests.rb @@ -0,0 +1,142 @@ +Shindo.tests('Fog::Compute[:openstack] | server requests', ['openstack']) do + + @server_format = { + 'id' => String, + 'addresses' => Hash, + 'flavor' => Hash, + 'hostId' => String, + 'image' => Hash, + 'metadata' => Hash, + 'name' => String, + 'progress' => Integer, + 'status' => String, + 'accessIPv4' => Fog::Nullable::String, + 'accessIPv6' => Fog::Nullable::String, + 'links' => Array + } + + @image_format = { + 'created' => Fog::Nullable::String, + 'id' => String, + 'name' => String, + 'progress' => Fog::Nullable::Integer, + 'status' => String, + 'updated' => String, + 'minRam' => Integer, + 'minDisk' => Integer, + 'server' => Hash, + 'metadata' => Hash, + 'links' => Array + } + + tests('success') do + + @image_id = Fog::Compute[:openstack].images[0].id + @snapshot_id = nil + @flavor_id = 2 + + tests('#create_server("test", #{@image_id} , 19)').formats(@server_format.merge('adminPass' => String), false) do + data = Fog::Compute[:openstack].create_server("test", @image_id, @flavor_id).body['server'] + @server_id = data['id'] + data + end + + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } + + #CREATE + tests("#get_server_details(#{@server_id})").formats(@server_format, false) do + Fog::Compute[:openstack].get_server_details(@server_id).body['server'] + end + + #LIST + #NOTE: we can remove strict=false if we remove uuid from GET /servers + tests('#list_servers').formats({'servers' => [OpenStack::Compute::Formats::SUMMARY]}, false) do + Fog::Compute[:openstack].list_servers.body + end + + #DETAILS + tests('#list_servers_detail').formats({'servers' => [@server_format]}, false) do + Fog::Compute[:openstack].list_servers_detail.body + end + + #CHANGE PASSWORD + tests("#change_password_server(#{@server_id}, 'fogupdatedserver')").succeeds do + Fog::Compute[:openstack].change_password_server(@server_id, 'foggy') + end + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } + + #UPDATE SERVER NAME + tests("#update_server(#{@server_id}, :name => 'fogupdatedserver')").succeeds do + Fog::Compute[:openstack].update_server(@server_id, :name => 'fogupdatedserver') + end + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } + + #CREATE IMAGE WITH METADATA + tests("#create_image(#{@server_id}, 'fog')").formats('image' => @image_format) do + data = Fog::Compute[:openstack].create_image(@server_id, 'fog', {"foo" => "bar"}).body + @snapshot_id = data['image']['id'] + data + end + Fog::Compute[:openstack].images.get(@snapshot_id).wait_for { ready? } + + #REBUILD + tests("#rebuild_server(#{@server_id}, #{@snapshot_id}, 'fog')").formats({'server' => @server_format}, false) do + Fog::Compute[:openstack].rebuild_server(@server_id, @snapshot_id, 'fog', 'newpass', {"foo" => "bar"}).body + end + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } if not Fog.mocking? 
+ + #RESIZE + tests("#resize_server(#{@server_id}, '3')").succeeds do + Fog::Compute[:openstack].resize_server(@server_id, 3) + end + Fog::Compute[:openstack].servers.get(@server_id).wait_for { self.state == 'VERIFY_RESIZE' } if not Fog.mocking? + + #RESIZE CONFIRM + tests("#resize_confirm(#{@server_id}, '3')").succeeds do + Fog::Compute[:openstack].confirm_resized_server(@server_id) + end + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } if not Fog.mocking? + + #REBOOT - HARD + tests("#reboot_server(#{@server_id}, 'HARD')").succeeds do + Fog::Compute[:openstack].reboot_server(@server_id, 'HARD') + end + + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } if not Fog.mocking? + + #REBOOT - SOFT + tests("#reboot_server(#{@server_id}, 'SOFT')").succeeds do + Fog::Compute[:openstack].reboot_server(@server_id, 'SOFT') + end + + Fog::Compute[:openstack].servers.get(@server_id).wait_for { ready? } if not Fog.mocking? + + #DELETE + tests("#delete_server(#{@server_id})").succeeds do + Fog::Compute[:openstack].delete_server(@server_id) + end + + end + + tests('failure') do + + tests('#delete_server(0)').raises(Fog::Compute::OpenStack::NotFound) do + Fog::Compute[:openstack].delete_server(0) + end + + tests('#get_server_details(0)').raises(Fog::Compute::OpenStack::NotFound) do + Fog::Compute[:openstack].get_server_details(0) + end + + tests("#update_server(0, :name => 'fogupdatedserver', :adminPass => 'fogupdatedserver')").raises(Fog::Compute::OpenStack::NotFound) do + Fog::Compute[:openstack].update_server(0, :name => 'fogupdatedserver', :adminPass => 'fogupdatedserver') + end + + tests('#reboot_server(0)').raises(Fog::Compute::OpenStack::NotFound) do + pending if Fog.mocking? + Fog::Compute[:openstack].reboot_server(0) + end + + end + +end diff --git a/tests/rackspace/requests/dns/dns_tests.rb b/tests/rackspace/requests/dns/dns_tests.rb index ce72c98ca5..2eb338e288 100644 --- a/tests/rackspace/requests/dns/dns_tests.rb +++ b/tests/rackspace/requests/dns/dns_tests.rb @@ -1,31 +1,31 @@ Shindo.tests('Fog::DNS[:rackspace] | DNS requests', ['rackspace', 'dns']) do - @service = Fog::DNS[:rackspace] + pending if Fog.mocking? 
tests('success on simple domain') do - domain_tests(@service, {:name => 'basictestdomain.com', :email => 'hostmaster@basictestdomain.com', :records => [{:ttl => 300, :name => 'basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]}) do + domain_tests(Fog::DNS[:rackspace], {:name => 'basictestdomain.com', :email => 'hostmaster@basictestdomain.com', :records => [{:ttl => 300, :name => 'basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]}) do - tests('list_domains').formats(LIST_DOMAIN_FORMAT) do - @service.list_domains.body + tests('list_domains').formats(LIST_DOMAIN_FORMAT.reject {|key,value| key == 'links'}) do + Fog::DNS[:rackspace].list_domains.body end tests("list_domains :limit => 5, :offset => 10, :domain => #{@domain_details.first['name']} --> All possible attributes").formats(LIST_DOMAIN_FORMAT) do - @service.list_domains(:limit => 5, :offset => 10, :domain => @domain_details.first['name']).body + Fog::DNS[:rackspace].list_domains(:limit => 5, :offset => 10, :domain => @domain_details.first['name']).body end tests("list_domain_details('#{@domain_id}')").formats(LIST_DOMAIN_DETAILS_WITH_RECORDS) do - @service.list_domain_details(@domain_id).body + Fog::DNS[:rackspace].list_domain_details(@domain_id).body end tests("modify_domain('#{@domain_id}', :ttl => 500, :comment => 'woot', :email => 'randomemail@randomhost.com')").succeeds do - response = @service.modify_domain @domain_id, :ttl => 500, :comment => 'woot', :email => 'randomemail@randomhost.com' - wait_for @service, response + response = Fog::DNS[:rackspace].modify_domain @domain_id, :ttl => 500, :comment => 'woot', :email => 'randomemail@randomhost.com' + wait_for Fog::DNS[:rackspace], response end end end tests('success for domain with multiple records') do - domain_tests(@service, + domain_tests(Fog::DNS[:rackspace], { :name => 'testdomainwithmultiplerecords.com', :email => 'hostmaster@testdomainwithmultiplerecords.com', @@ -49,7 +49,7 @@ end tests('success for multiple domains') do - domains_tests(@service, + domains_tests(Fog::DNS[:rackspace], [ {:name => 'basictestdomain1.com', :email => 'hostmaster@basictestdomain1.com', :records => [{:ttl => 300, :name =>'basictestdomain1.com', :type => 'A', :data => '192.168.1.1'}]}, {:name => 'basictestdomain2.com', :email => 'hostmaster@basictestdomain2.com', :records => [{:ttl => 300, :name =>'basictestdomain2.com', :type => 'A', :data => '192.168.1.1'}]} @@ -57,7 +57,7 @@ end tests('success for domain with subdomain') do - domains_tests(@service, + domains_tests(Fog::DNS[:rackspace], [ {:name => 'basictestdomain.com', :email => 'hostmaster@basictestdomain.com', :records => [{:ttl => 300, :name =>'basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]}, {:name => 'subdomain.basictestdomain.com', :email => 'hostmaster@subdomain.basictestdomain.com', :records => [{:ttl => 300, :name =>'subdomain.basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]} @@ -66,7 +66,7 @@ @root_domain_id = @domain_details.find { |domain| domain['name'] == 'basictestdomain.com' }['id'] tests("list_domain_details('#{@root_domain_id}', :show_records => false, :show_subdomains => false)") do - response = @service.list_domain_details(@root_domain_id, :show_records => false, :show_subdomains => false) + response = Fog::DNS[:rackspace].list_domain_details(@root_domain_id, :show_records => false, :show_subdomains => false) formats(LIST_DOMAIN_DETAILS_WITHOUT_RECORDS_AND_SUBDOMAINS_FORMAT) { response.body } returns(nil) { response.body['recordsList'] } @@ -74,7 +74,7 @@ end 
tests("list_domain_details('#{@root_domain_id}', :show_records => true, :show_subdomains => true)") do - response = @service.list_domain_details(@root_domain_id, :show_records => true, :show_subdomains => true) + response = Fog::DNS[:rackspace].list_domain_details(@root_domain_id, :show_records => true, :show_subdomains => true) formats(LIST_DOMAIN_DETAILS_WITH_RECORDS_AND_SUBDOMAINS_FORMAT) { response.body } returns(false) { response.body['recordsList'].nil? } @@ -82,14 +82,14 @@ end tests("list_subdomains('#{@root_domain_id}')").formats(LIST_SUBDOMAINS_FORMAT) do - @service.list_subdomains(@root_domain_id).body + Fog::DNS[:rackspace].list_subdomains(@root_domain_id).body end tests("remove_domain('#{@root_domain_id}', :delete_subdomains => true)") do - wait_for @service, @service.remove_domain(@root_domain_id, :delete_subdomains => true) + wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].remove_domain(@root_domain_id, :delete_subdomains => true) test('domain and subdomains were really deleted') do - (@service.list_domains.body['domains'].collect { |domain| domain['name'] } & ['basictestdomain.com', 'subdomain.basictestdomain.com']).empty? + (Fog::DNS[:rackspace].list_domains.body['domains'].collect { |domain| domain['name'] } & ['basictestdomain.com', 'subdomain.basictestdomain.com']).empty? end end end @@ -98,22 +98,22 @@ tests( 'failure') do tests('create_domain(invalid)').raises(Fog::Rackspace::Errors::BadRequest) do - wait_for @service, @service.create_domains([{:name => 'badtestdomain.com', :email => '', :records => [{:ttl => 300, :name => 'badtestdomain.com', :type => 'A', :data => '192.168.1.1'}]}]) + wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].create_domains([{:name => 'badtestdomain.com', :email => '', :records => [{:ttl => 300, :name => 'badtestdomain.com', :type => 'A', :data => '192.168.1.1'}]}]) end tests('list_domains :limit => 5, :offset => 8').raises(Fog::Rackspace::Errors::BadRequest) do - @service.list_domains :limit => 5, :offset => 8 + Fog::DNS[:rackspace].list_domains :limit => 5, :offset => 8 end tests('list_domain_details 34335353').raises(Fog::Rackspace::Errors::NotFound) do - @service.list_domain_details 34335353 + Fog::DNS[:rackspace].list_domain_details 34335353 end #tests('create_domains(#{domains})').raises(Fog::Rackspace::Errors::Conflict) do - # wait_for @service.create_domains(domains) + # wait_for Fog::DNS[:rackspace].create_domains(domains) #end #tests('remove_domain(34343435)').raises(Fog::DNS::Rackspace::DeleteFault) do - # @service.remove_domain 34343435 + # Fog::DNS[:rackspace].remove_domain 34343435 #end end end diff --git a/tests/rackspace/requests/dns/helper.rb b/tests/rackspace/requests/dns/helper.rb index 2de0a68025..2ecb4f185e 100644 --- a/tests/rackspace/requests/dns/helper.rb +++ b/tests/rackspace/requests/dns/helper.rb @@ -66,8 +66,16 @@ }) LIST_DOMAIN_DETAILS_WITH_RECORDS_AND_SUBDOMAINS_FORMAT = BASIC_DOMAIN_DETAIL_FORMAT.merge({ - 'recordsList' => RECORD_LIST_FORMAT, - 'subdomains' => [SUBDOMAIN_FORMAT] + 'recordsList' => RECORD_LIST_FORMAT, + 'subdomains' => { + 'domains' => [{ + 'created' => String, + 'name' => String, + 'id' => Integer, + 'updated' => String + }], + 'totalEntries' => Integer + } }) LIST_DOMAIN_DETAILS_WITHOUT_RECORDS_AND_SUBDOMAINS_FORMAT = BASIC_DOMAIN_DETAIL_FORMAT @@ -82,19 +90,19 @@ def wait_for(service, response) job_id = response.body['jobId'] - while true + Fog.wait_for do response = service.callback(job_id) - return response if response.status != 202 - sleep 5 + response.body['status'] != 'RUNNING' end 
+ response end def domain_tests(service, domain_attributes) tests("create_domains([#{domain_attributes}])").formats(CREATE_DOMAINS_FORMAT) do response = wait_for service, service.create_domains([domain_attributes]) - @domain_details = response.body['domains'] + @domain_details = response.body['response']['domains'] @domain_id = @domain_details[0]['id'] - response.body + response.body['response'] end begin @@ -111,9 +119,9 @@ def domain_tests(service, domain_attributes) def domains_tests(service, domains_attributes, custom_delete = false) tests("create_domains(#{domains_attributes})").formats(CREATE_DOMAINS_FORMAT) do response = wait_for service, service.create_domains(domains_attributes) - @domain_details = response.body['domains'] + @domain_details = response.body['response']['domains'] @domain_ids = @domain_details.collect { |domain| domain['id'] } - response.body + response.body['response'] end begin diff --git a/tests/rackspace/requests/dns/records_tests.rb b/tests/rackspace/requests/dns/records_tests.rb index 373584ae69..386a4fc99e 100644 --- a/tests/rackspace/requests/dns/records_tests.rb +++ b/tests/rackspace/requests/dns/records_tests.rb @@ -1,31 +1,31 @@ Shindo.tests('Fog::DNS[:rackspace] | dns records requests', ['rackspace', 'dns']) do - @service = Fog::DNS[:rackspace] + pending if Fog.mocking? - domain_tests(@service, {:name => 'basictestdomain.com', :email => 'hostmaster@basictestdomain.com', :records => [{:ttl => 300, :name => 'basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]}) do + domain_tests(Fog::DNS[:rackspace], {:name => 'basictestdomain.com', :email => 'hostmaster@basictestdomain.com', :records => [{:ttl => 300, :name => 'basictestdomain.com', :type => 'A', :data => '192.168.1.1'}]}) do tests('success on single record') do tests("list_records(#{@domain_id})").formats(RECORD_LIST_FORMAT) do - @service.list_records(@domain_id).body + Fog::DNS[:rackspace].list_records(@domain_id).body end tests("add_records(#{@domain_id}, [{ :name => 'test1.basictestdomain.com', :type => 'A', :data => '192.168.2.1'}])").formats(RECORD_LIST_FORMAT) do - response = wait_for @service, @service.add_records(@domain_id, [{ :name => 'test1.basictestdomain.com', :type => 'A', :data => '192.168.2.1'}]) + response = wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].add_records(@domain_id, [{ :name => 'test1.basictestdomain.com', :type => 'A', :data => '192.168.2.1'}]) @record_id = response.body['records'].first['id'] response.body end tests("list_record_details(#{@domain_id}, #{@record_id})").formats(RECORD_FORMAT) do - @service.list_record_details(@domain_id, @record_id).body + Fog::DNS[:rackspace].list_record_details(@domain_id, @record_id).body end tests("modify_record(#{@domain_id}, #{@record_id}, { :ttl => 500, :name => 'test2.basictestdomain.com', :data => '192.168.3.1' })").succeeds do - wait_for @service, @service.modify_record(@domain_id, @record_id, { :ttl => 500, :name => 'test2.basictestdomain.com', :data => '192.168.3.1' }) + wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].modify_record(@domain_id, @record_id, { :ttl => 500, :name => 'test2.basictestdomain.com', :data => '192.168.3.1' }) end tests("remove_record(#{@domain_id}, #{@record_id})").succeeds do - wait_for @service, @service.remove_record(@domain_id, @record_id) + wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].remove_record(@domain_id, @record_id) end end @@ -38,43 +38,43 @@ ] tests("add_records(#{@domain_id}, #{records_attributes})").formats(RECORD_LIST_FORMAT) do - response = wait_for @service, 
@service.add_records(@domain_id, records_attributes) + response = wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].add_records(@domain_id, records_attributes) @record_ids = response.body['records'].collect { |record| record['id'] } response.body end tests("remove_records(#{@domain_id}, #{@record_ids})").succeeds do - wait_for @service, @service.remove_records(@domain_id, @record_ids) + wait_for Fog::DNS[:rackspace], Fog::DNS[:rackspace].remove_records(@domain_id, @record_ids) end end tests( 'failure') do tests("list_records('')").raises(ArgumentError) do - @service.list_records('') + Fog::DNS[:rackspace].list_records('') end tests("list_records('abc')").raises(Fog::Rackspace::Errors::NotFound) do - @service.list_records('abc') + Fog::DNS[:rackspace].list_records('abc') end tests("list_record_details(#{@domain_id}, '')").raises(ArgumentError) do - @service.list_record_details(@domain_id, '') + Fog::DNS[:rackspace].list_record_details(@domain_id, '') end tests("list_record_details(#{@domain_id}, 'abc')").raises(Fog::Rackspace::Errors::NotFound) do - @service.list_record_details(@domain_id, 'abc') + Fog::DNS[:rackspace].list_record_details(@domain_id, 'abc') end tests("remove_record(#{@domain_id}, '')").raises(ArgumentError) do - @service.remove_record(@domain_id, '') + Fog::DNS[:rackspace].remove_record(@domain_id, '') end tests("remove_record(#{@domain_id}, 'abc')").raises(Fog::Rackspace::Errors::NotFound) do - @service.remove_record(@domain_id, 'abc') + Fog::DNS[:rackspace].remove_record(@domain_id, 'abc') end tests("add_record(#{@domain_id}, [{ :name => '', :type => '', :data => ''}])").raises(Fog::Rackspace::Errors::BadRequest) do - @service.add_records(@domain_id, [{ :name => '', :type => '', :data => ''}]) + Fog::DNS[:rackspace].add_records(@domain_id, [{ :name => '', :type => '', :data => ''}]) end end end diff --git a/tests/rackspace/requests/access_list_tests.rb b/tests/rackspace/requests/load_balancers/access_list_tests.rb similarity index 100% rename from tests/rackspace/requests/access_list_tests.rb rename to tests/rackspace/requests/load_balancers/access_list_tests.rb diff --git a/tests/rackspace/requests/algorithm_tests.rb b/tests/rackspace/requests/load_balancers/algorithm_tests.rb similarity index 100% rename from tests/rackspace/requests/algorithm_tests.rb rename to tests/rackspace/requests/load_balancers/algorithm_tests.rb diff --git a/tests/rackspace/requests/connection_logging_tests.rb b/tests/rackspace/requests/load_balancers/connection_logging_tests.rb similarity index 100% rename from tests/rackspace/requests/connection_logging_tests.rb rename to tests/rackspace/requests/load_balancers/connection_logging_tests.rb diff --git a/tests/rackspace/requests/connection_throttling_tests.rb b/tests/rackspace/requests/load_balancers/connection_throttling_tests.rb similarity index 100% rename from tests/rackspace/requests/connection_throttling_tests.rb rename to tests/rackspace/requests/load_balancers/connection_throttling_tests.rb diff --git a/tests/rackspace/requests/helper.rb b/tests/rackspace/requests/load_balancers/helper.rb similarity index 89% rename from tests/rackspace/requests/helper.rb rename to tests/rackspace/requests/load_balancers/helper.rb index 640bc97eb7..affd9f5106 100644 --- a/tests/rackspace/requests/helper.rb +++ b/tests/rackspace/requests/load_balancers/helper.rb @@ -14,7 +14,8 @@ 'numVips' => Fog::Nullable::Integer, 'numPolls' => Fog::Nullable::Integer, 'startTime' => Fog::Nullable::String, - 'endTime' => Fog::Nullable::String + 'endTime' => 
Fog::Nullable::String, + 'vipType' => Fog::Nullable::String, } ] } @@ -86,6 +87,11 @@ 'port' => Integer, 'protocol' => String, 'algorithm' => String, + 'sourceAddresses' => { + 'ipv4Servicenet' => String, + 'ipv4Public' => String, + 'ipv6Public' => String, + }, 'status' => String, 'virtualIps' => [VIRTUAL_IP_FORMAT], 'nodes' => [SINGLE_NODE_FORMAT], @@ -100,6 +106,11 @@ 'port' => Integer, 'protocol' => String, 'algorithm' => String, + 'sourceAddresses' => { + 'ipv4Servicenet' => String, + 'ipv4Public' => String, + 'ipv6Public' => String, + }, 'status' => String, 'cluster' => { 'name' => String }, 'virtualIps' => [VIRTUAL_IP_FORMAT], diff --git a/tests/rackspace/requests/load_balancer_tests.rb b/tests/rackspace/requests/load_balancers/load_balancer_tests.rb similarity index 98% rename from tests/rackspace/requests/load_balancer_tests.rb rename to tests/rackspace/requests/load_balancers/load_balancer_tests.rb index 87eb3e7444..560c27aac1 100644 --- a/tests/rackspace/requests/load_balancer_tests.rb +++ b/tests/rackspace/requests/load_balancers/load_balancer_tests.rb @@ -52,7 +52,7 @@ tests('#get_load_balancer(0)').raises(Fog::Rackspace::LoadBalancers::NotFound) do @service.get_load_balancer(0) end - tests('#delete_load_balancer(0)').raises(Fog::Rackspace::LoadBalancers::NotFound) do + tests('#delete_load_balancer(0)').raises(Fog::Rackspace::LoadBalancers::BadRequest) do @service.delete_load_balancer(0) end tests('#update_load_balancer(0)').raises(Fog::Rackspace::LoadBalancers::NotFound) do diff --git a/tests/rackspace/requests/load_balancer_usage_tests.rb b/tests/rackspace/requests/load_balancers/load_balancer_usage_tests.rb similarity index 100% rename from tests/rackspace/requests/load_balancer_usage_tests.rb rename to tests/rackspace/requests/load_balancers/load_balancer_usage_tests.rb diff --git a/tests/rackspace/requests/monitor_tests.rb b/tests/rackspace/requests/load_balancers/monitor_tests.rb similarity index 100% rename from tests/rackspace/requests/monitor_tests.rb rename to tests/rackspace/requests/load_balancers/monitor_tests.rb diff --git a/tests/rackspace/requests/node_tests.rb b/tests/rackspace/requests/load_balancers/node_tests.rb similarity index 100% rename from tests/rackspace/requests/node_tests.rb rename to tests/rackspace/requests/load_balancers/node_tests.rb diff --git a/tests/rackspace/requests/protocol_tests.rb b/tests/rackspace/requests/load_balancers/protocol_tests.rb similarity index 100% rename from tests/rackspace/requests/protocol_tests.rb rename to tests/rackspace/requests/load_balancers/protocol_tests.rb diff --git a/tests/rackspace/requests/session_persistence_tests.rb b/tests/rackspace/requests/load_balancers/session_persistence_tests.rb similarity index 100% rename from tests/rackspace/requests/session_persistence_tests.rb rename to tests/rackspace/requests/load_balancers/session_persistence_tests.rb diff --git a/tests/rackspace/requests/usage_tests.rb b/tests/rackspace/requests/load_balancers/usage_tests.rb similarity index 100% rename from tests/rackspace/requests/usage_tests.rb rename to tests/rackspace/requests/load_balancers/usage_tests.rb diff --git a/tests/rackspace/requests/virtual_ip_tests.rb b/tests/rackspace/requests/load_balancers/virtual_ip_tests.rb similarity index 94% rename from tests/rackspace/requests/virtual_ip_tests.rb rename to tests/rackspace/requests/load_balancers/virtual_ip_tests.rb index 22f48038e8..46a1a5c6be 100644 --- a/tests/rackspace/requests/virtual_ip_tests.rb +++ b/tests/rackspace/requests/load_balancers/virtual_ip_tests.rb 
@@ -21,10 +21,10 @@ tests('failure') do #TODO - I feel like this should really be a BadRequest, need to dig in - tests('create_virtual_ip(invalid type)').raises(Fog::Rackspace::LoadBalancers::InteralServerError) do + tests('create_virtual_ip(invalid type)').raises(Fog::Rackspace::LoadBalancers::InternalServerError) do @service.create_virtual_ip(@lb.id, 'badtype') end - tests('delete_virtual_ip(0)').raises(Fog::Rackspace::LoadBalancers::NotFound) do + tests('delete_virtual_ip(0)').raises(Fog::Rackspace::LoadBalancers::BadRequest) do @service.delete_virtual_ip(@lb.id, 0) end end diff --git a/tests/rackspace/url_encoding_tests.rb b/tests/rackspace/url_encoding_tests.rb new file mode 100644 index 0000000000..6e5b92d801 --- /dev/null +++ b/tests/rackspace/url_encoding_tests.rb @@ -0,0 +1,3 @@ +Shindo.tests('Rackspace | url_encoding', ['rackspace']) do + returns( Fog::Rackspace.escape( "is this my file?.jpg" ) ) { "is%20this%20my%20file%3F.jpg" } +end diff --git a/tests/storage/models/directory_test.rb b/tests/storage/models/directory_test.rb index 639ce9241e..74057d9536 100644 --- a/tests/storage/models/directory_test.rb +++ b/tests/storage/models/directory_test.rb @@ -8,7 +8,7 @@ :key => 'fogdirectorytests' }.merge!(config[:directory_attributes] || {}) - model_tests(Fog::Storage[provider].directory, directory_attributes, config[:mocked]) do + model_tests(Fog::Storage[provider].directories, directory_attributes, config[:mocked]) do tests("#public=(true)").succeeds do pending if Fog.mocking? && !config[:mocked] @@ -17,7 +17,7 @@ tests('responds_to(:public_url)') do pending if Fog.mocking? && !config[:mocked] - @instance.responds_to(:public_url) + responds_to(:public_url) end end diff --git a/tests/vsphere/compute_tests.rb b/tests/vsphere/compute_tests.rb index 9858e475f2..13ff9c9465 100644 --- a/tests/vsphere/compute_tests.rb +++ b/tests/vsphere/compute_tests.rb @@ -34,7 +34,7 @@ end tests("Compute attributes") do - %w{ vsphere_is_vcenter vsphere_rev }.each do |attr| + %w{ vsphere_is_vcenter vsphere_rev vsphere_username vsphere_server }.each do |attr| test("it should respond to #{attr}") { compute.respond_to? attr } end end diff --git a/tests/vsphere/requests/compute/clone_vm.rb b/tests/vsphere/requests/compute/clone_vm.rb deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/vsphere/requests/compute/vm_clone_tests.rb b/tests/vsphere/requests/compute/vm_clone_tests.rb index da027964e5..ad7e95a84f 100644 --- a/tests/vsphere/requests/compute/vm_clone_tests.rb +++ b/tests/vsphere/requests/compute/vm_clone_tests.rb @@ -1,4 +1,5 @@ Shindo.tests("Fog::Compute[:servers] | vm_clone request") do + #require 'guid' template = "50323f93-6835-1178-8b8f-9e2109890e1a" compute = Fog::Compute[:vsphere] @@ -15,5 +16,9 @@ 'it should raise ServiceError if a VM already exists with the provided name') do compute.vm_clone('instance_uuid' => '123', 'name' => 'jefftest') end + raises(Fog::Compute::Vsphere::NotFound, 'it should raise Fog::Compute::Vsphere::NotFound when the UUID is not a string') do + pending # require 'guid' + compute.vm_clone('instance_uuid' => Guid.from_s(template), 'name' => 'jefftestfoo') + end end end
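A note on the Shindo pattern used throughout the new OpenStack request tests above: each test declares an expected format hash and checks a response body against it with `formats`, passing `false` as a second argument when extra keys in the response should be tolerated (non-strict matching), and using `Fog::Nullable::*` types for values that may legitimately be nil. The following is a minimal sketch of that pattern only; it assumes `Fog::Compute[:openstack]` is configured with working credentials and that a flavor with id "1" exists, neither of which is part of this changelog.

    # Sketch only: mirrors the format-matching style of the new OpenStack
    # request tests. Credentials and the existence of flavor "1" are assumed.
    # These files are normally run through the shindo/rake runner rather than
    # plain ruby, but requiring shindo directly also defines Shindo.tests.
    require 'fog'
    require 'shindo'

    Shindo.tests('Fog::Compute[:openstack] | flavor format example', ['openstack']) do

      @flavor_format = {
        'id'   => String,
        'name' => String,
        'ram'  => Integer
      }

      tests('success') do
        # The second argument (false) makes the match non-strict, so keys not
        # listed in @flavor_format ('disk', 'links', ...) are simply ignored.
        tests('#get_flavor_details("1")').formats(@flavor_format, false) do
          Fog::Compute[:openstack].get_flavor_details('1').body['flavor']
        end
      end

    end

The format matcher compares each returned value's class against the one declared in the hash, which is why the tests above only pin down classes (String, Integer, Array) rather than concrete values.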
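Relatedly, the reworked wait_for in tests/rackspace/requests/dns/helper.rb drops the hand-rolled `while true` / `sleep 5` loop in favour of `Fog.wait_for`, which re-runs its block until it returns truthy (and raises a timeout error if it never does). A rough usage sketch of driving one of the asynchronous Rackspace DNS requests that way; the domain name, email and record data are made-up example values, and configured Rackspace credentials are assumed.

    # Sketch, not part of the changes above: create a Rackspace DNS domain and
    # poll its asynchronous job the way the updated test helper does.
    require 'fog'

    service  = Fog::DNS[:rackspace]
    response = service.create_domains([{
      :name    => 'fog-example-domain.com',
      :email   => 'hostmaster@fog-example-domain.com',
      :records => [{ :ttl => 300, :name => 'fog-example-domain.com', :type => 'A', :data => '192.0.2.1' }]
    }])

    job_id = response.body['jobId']
    Fog.wait_for do
      # callback reports the job status; a truthy block value stops the polling
      response = service.callback(job_id)
      response.body['status'] != 'RUNNING'
    end

    # Per the same helper change, the created domains are read from the
    # 'response' key of the callback body rather than from the top level.
    domains = response.body['response']['domains']

The same polling idiom backs the domain and record request tests above, which is why those tests only assert on the final job payload instead of sleeping for fixed intervals.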