digital_ocean.py enhancements #3415

Merged: 3 commits, Jul 11, 2013

Changes from all commits
104 changes: 68 additions & 36 deletions plugins/inventory/digital_ocean.py
@@ -14,6 +14,12 @@

The --pretty (-p) option pretty-prints the output for better human readability.

----
Although the cache stores all the information received from DigitalOcean,
the cache is not used for current droplet information (in --list, --host,
--all, and --droplets), so that droplet information is always up to date.
You can force this script to use the cache with --force-cache.

----
Configuration is read from `digital_ocean.ini`, then from environment variables,
and finally from command-line arguments.
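
As an illustration of that precedence, resolving a single setting might look like
the sketch below. This is a hypothetical helper, not code from this PR; only
DO_CLIENT_ID, --client-id, and digital_ocean.ini come from the script itself,
and the ini section/option names are assumptions.

    import os
    import ConfigParser

    def resolve_client_id(cli_value):
        # hypothetical sketch: ini file, then environment, then CLI flag wins
        value = None
        config = ConfigParser.SafeConfigParser()
        if config.read('digital_ocean.ini') and config.has_section('digital_ocean'):
            if config.has_option('digital_ocean', 'client_id'):
                value = config.get('digital_ocean', 'client_id')   # 1. digital_ocean.ini
        value = os.environ.get('DO_CLIENT_ID', value)               # 2. environment variable
        if cli_value:
            value = cli_value                                       # 3. command-line argument
        return value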
@@ -178,12 +184,21 @@ def __init__(self):
        self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
        self.cache_refreshed = False

        if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid():
            self.load_all_data_from_digital_ocean()
        else:
            self.load_from_cache()
            if len(self.data) == 0:
                if self.args.force_cache:
                    print '''Cache is empty and --force-cache was specified'''
                    sys.exit(-1)
                self.load_all_data_from_digital_ocean()
            else:
                # We always get fresh droplets for --list, --host, --all, and --droplets
                # unless --force-cache is specified
                if not self.args.force_cache and (
                        self.args.list or self.args.host or self.args.all or self.args.droplets):
                    self.load_droplets_from_digital_ocean()
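
        # To summarize the caching behavior above (a reader's gloss, not PR text):
        #   * --refresh-cache, or a missing/expired cache, forces a full API fetch;
        #     since `and` binds tighter than `or`, the first condition reads as
        #     (not force_cache and refresh_cache) or (cache invalid), so an invalid
        #     cache triggers a full fetch even when --force-cache is given.
        #   * otherwise the cache is loaded; droplet data alone is then re-fetched
        #     for --list, --host, --all, and --droplets, unless --force-cache is set.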

        # Pick the json_data to print based on the CLI command
        if self.args.droplets: json_data = { 'droplets': self.data['droplets'] }
@@ -194,14 +209,14 @@ def __init__(self):
        elif self.args.domains: json_data = { 'domains': self.data['domains'] }
        elif self.args.all: json_data = self.data

        elif self.args.host: json_data = self.load_droplet_variables_for_host()
        else:    # '--list' comes last so that it is the default
            json_data = self.inventory

        if self.args.pretty:
            print json.dumps(json_data, sort_keys=True, indent=2)
        else:
            print json.dumps(json_data)
        # That's all she wrote...


@@ -242,7 +257,7 @@ def read_cli_args(self):
        parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')

        parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
        parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON')
        parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
        parser.add_argument('--images', action='store_true', help='List Images as JSON')
        parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
@@ -253,7 +268,8 @@

        parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
        parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
        parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
        parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')

        parser.add_argument('--env', '-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY')
        parser.add_argument('--client-id', '-c', action='store', help='DigitalOcean Client ID')
@@ -266,6 +282,11 @@ def read_cli_args(self):
        if self.args.cache_path: self.cache_path = self.args.cache_path
        if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age

        # Make --list the default if none of the other commands are specified
        if (not self.args.droplets and not self.args.regions and not self.args.images and
                not self.args.sizes and not self.args.ssh_keys and not self.args.domains and
                not self.args.all and not self.args.host):
            self.args.list = True
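
        # An equivalent, more compact formulation (illustration only, not in the PR):
        #
        #     if not any((self.args.droplets, self.args.regions, self.args.images,
        #                 self.args.sizes, self.args.ssh_keys, self.args.domains,
        #                 self.args.all, self.args.host)):
        #         self.args.list = True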


###########################################################################
@@ -274,33 +295,43 @@ def read_cli_args(self):

    def load_all_data_from_digital_ocean(self):
        ''' Use dopy to get all the information from DigitalOcean and save data in cache files '''
        manager = DoManager(self.client_id, self.api_key)

        self.data = {}
        self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
        self.data['regions'] = self.sanitize_list(manager.all_regions())
        self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
        self.data['sizes'] = self.sanitize_list(manager.sizes())
        self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys())
        self.data['domains'] = self.sanitize_list(manager.all_domains())

        self.index = {}
        self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name')
        self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name')
        self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
        self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
        self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)

        self.build_inventory()

        self.write_to_cache()


    def load_droplets_from_digital_ocean(self):
        ''' Use dopy to get droplet information from DigitalOcean and save data in cache files '''
        manager = DoManager(self.client_id, self.api_key)
        self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
        self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
        self.build_inventory()
        self.write_to_cache()


    def build_index(self, source_seq, key_from, key_to, use_slug=True):
        dest_dict = {}
        for item in source_seq:
            # prefer the human-readable 'slug' over key_to when present and requested
            name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to]
            key = item[key_from]
            dest_dict[key] = name
        return dest_dict
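
    # For illustration (toy data, not from the PR), build_index maps each item's
    # key_from value to its slug when available, falling back to key_to:
    #
    #     regions = [{'id': 1, 'name': 'New York 1', 'slug': 'nyc1'},
    #                {'id': 2, 'name': 'Amsterdam 1'}]
    #     build_index(regions, 'id', 'name')  =>  {1: 'nyc1', 2: 'Amsterdam 1'}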


@@ -313,23 +344,23 @@ def build_inventory(self):
            dest = droplet['ip_address']

            self.inventory[droplet['id']] = [dest]
            self.push(self.inventory, droplet['name'], dest)
            self.push(self.inventory, 'region_' + droplet['region_id'], dest)
            self.push(self.inventory, 'image_' + droplet['image_id'], dest)
            self.push(self.inventory, 'size_' + droplet['size_id'], dest)
            self.push(self.inventory, 'status_' + droplet['status'], dest)

            region_name = self.index['region_to_name'][droplet['region_id']]
            self.push(self.inventory, 'region_' + region_name, dest)

            size_name = self.index['size_to_name'][droplet['size_id']]
            self.push(self.inventory, 'size_' + size_name, dest)

            image_name = self.index['image_to_name'][droplet['image_id']]
            self.push(self.inventory, 'image_' + image_name, dest)

            distro_name = self.index['image_to_distro'][droplet['image_id']]
            self.push(self.inventory, 'distro_' + distro_name, dest)
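
        # The resulting inventory groups each droplet's address under several
        # names; e.g. for a droplet 'web1' at 203.0.113.10 (hypothetical values,
        # not from the PR):
        #
        #     { "12345":         ["203.0.113.10"],
        #       "web1":          ["203.0.113.10"],
        #       "region_nyc1":   ["203.0.113.10"],
        #       "size_512MB":    ["203.0.113.10"],
        #       "distro_Ubuntu": ["203.0.113.10"],
        #       "status_active": ["203.0.113.10"] }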


    def load_droplet_variables_for_host(self):
@@ -338,7 +369,8 @@ def load_droplet_variables_for_host(self):

        if not host in self.index['host_to_droplet']:
            # try updating cache
            if not self.args.force_cache:
                self.load_all_data_from_digital_ocean()
            if not host in self.index['host_to_droplet']:
                # host might not exist anymore
                return {}
@@ -347,13 +379,13 @@ def load_from_cache(self):
        if self.cache_refreshed:
            for drop in self.data['droplets']:
                if drop['ip_address'] == host:
                    droplet = self.sanitize_dict(drop)
                    break
        else:
            # Cache wasn't refreshed this run, so hit the DigitalOcean API
            manager = DoManager(self.client_id, self.api_key)
            droplet_id = self.index['host_to_droplet'][host]
            droplet = self.sanitize_dict(manager.show_droplet(droplet_id))

        if not droplet:
            return {}
@@ -395,7 +427,7 @@ def load_from_cache(self):
        cache = open(self.cache_filename, 'r')
        json_data = cache.read()
        cache.close()
        data = json.loads(json_data)

        self.data = data['data']
        self.inventory = data['inventory']
@@ -405,7 +437,7 @@
    def write_to_cache(self):
        ''' Writes data in JSON format to a file '''
        data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory }
        json_data = json.dumps(data, sort_keys=True, indent=2)

        cache = open(self.cache_filename, 'w')
        cache.write(json_data)
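
        # The cache file is therefore a single JSON object with three top-level
        # keys (shape only; values illustrative, not from the PR):
        #
        #     { "data":      { "droplets": [...], "regions": [...], ... },
        #       "index":     { "region_to_name": {...}, "host_to_droplet": {...}, ... },
        #       "inventory": { "web1": ["203.0.113.10"], ... } }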
@@ -441,7 +473,7 @@ def sanitize_dict(self, d):
    def sanitize_list(self, seq):
        new_seq = []
        for d in seq:
            new_seq.append(self.sanitize_dict(d))
        return new_seq

