Skip to content

Commit

Permalink
Added full multipage support with fixed quotes
Browse files Browse the repository at this point in the history
  • Loading branch information
wolph committed Mar 7, 2017
1 parent b0d74b8 commit c9b954d
Show file tree
Hide file tree
Showing 14 changed files with 204 additions and 150 deletions.
4 changes: 2 additions & 2 deletions digitalocean/Droplet.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def create_multiple(*args, **kwargs):

droplets = []

data = api.get_data("droplets", type=POST, params=data)
data = api.get_data("droplets/", type=POST, params=data)

if data:
action_ids = [data["links"]["actions"][0]["id"]]
Expand Down Expand Up @@ -561,7 +561,7 @@ def create(self, *args, **kwargs):
if self.user_data:
data["user_data"] = self.user_data

data = self.get_data("droplets", type=POST, params=data)
data = self.get_data("droplets/", type=POST, params=data)

if data:
self.id = data['droplet']['id']
Expand Down
59 changes: 4 additions & 55 deletions digitalocean/Manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,57 +25,6 @@ class Manager(BaseAPI):
def __init__(self, *args, **kwargs):
super(Manager, self).__init__(*args, **kwargs)

def get_data(self, *args, **kwargs):
    """
    Customized version of get_data that transparently handles pagination.

    A default of 200 elements per page is injected when the caller does
    not specify one, as explained here:
    https://github.com/koalalorenzo/python-digitalocean/pull/78
    """
    params = kwargs["params"] if "params" in kwargs else {}
    if "per_page" not in params:
        params["per_page"] = 200
    kwargs["params"] = params

    data = super(Manager, self).get_data(*args, **kwargs)

    # When the API reports more elements (meta.total) than fit on a
    # single page, fetch and merge the remaining pages before returning.
    meta = data['meta'] if 'meta' in data else {}
    if 'total' in meta and meta['total'] > params['per_page']:
        return self.__deal_with_pagination(args[0], data, params)
    return data

def __deal_with_pagination(self, url, data, params):
    """
    Perform multiple calls in order to have a full list of elements
    when the API response is "paginated" (the content list is divided
    over more than one page).

    :param url: the endpoint path of the original request.
    :param data: the already-fetched first page of results.
    :param params: query parameters of the first request; mutated in
        place to add the 'page' number of each follow-up request.
    :return: a dict holding the merged content list, or the original
        ``data`` unchanged when no pagination links are present.
    """
    try:
        # The 'last' page link advertises, via its 'page' query
        # parameter, how many pages exist in total.
        lastpage_url = data['links']['pages']['last']
        pages = parse_qs(urlparse(lastpage_url).query)['page'][0]
        # NOTE(review): popitem() removes an arbitrary remaining item and
        # assumes it is the content list keyed by resource name (e.g.
        # 'droplets'); this depends on the response dict's contents and
        # ordering — confirm against the API responses.
        key, values = data.popitem()
        for page in range(2, int(pages) + 1):
            params.update({'page': page})
            new_data = super(Manager, self).get_data(url, params=params)

            # Append this page's content list to the accumulated values.
            more_values = list(new_data.values())[0]
            for value in more_values:
                values.append(value)
        data = {}
        data[key] = values
    except KeyError:  # No 'links'/'pages' info: single page, return as-is.
        pass

    return data

def get_account(self):
"""
Returns an Account object.
Expand All @@ -98,11 +47,11 @@ def get_all_droplets(self, tag_name=None):
"""
This function returns a list of Droplet object.
"""
params = dict()
if tag_name:
params = {"tag_name": tag_name}
data = self.get_data("droplets/", params=params)
else:
data = self.get_data("droplets/")
params["tag_name"] = tag_name

data = self.get_data("droplets/", params=params)

droplets = list()
for jsoned in data['droplets']:
Expand Down
49 changes: 44 additions & 5 deletions digitalocean/baseapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
import logging
import requests
try:
from urlparse import urljoin
import urlparse
except ImportError:
from urllib.parse import urljoin
from urllib import parse as urlparse


GET = 'GET'
Expand Down Expand Up @@ -65,7 +65,7 @@ def __perform_request(self, url, type=GET, params=None):
if not self.token:
raise TokenError("No token provided. Please use a valid token")

url = urljoin(self.end_point, url)
url = urlparse.urljoin(self.end_point, url)

# lookup table to find out the apropriate requests method,
# headers and payload type (json or query parameters)
Expand Down Expand Up @@ -97,6 +97,32 @@ def __perform_request(self, url, type=GET, params=None):

return requests_method(url, **kwargs)

def __deal_with_pagination(self, url, method, params, data):
    """
    Perform multiple calls in order to have a full list of elements
    when the API response is "paginated" (the content list is divided
    over more than one page).

    :param url: endpoint of the original request; replaced on each
        iteration by the path of the 'next' page link.
    :param method: the HTTP method to repeat for each page.
    :param params: query parameters of the original request; extended
        in place with the parameters carried by each 'next' link.
    :param data: the decoded JSON body of the first page.
    :return: a dict with the list contents of all pages merged.
    """
    all_data = data
    while data.get("links", {}).get("pages", {}).get("next"):
        url, query = data["links"]["pages"]["next"].split("?", 1)

        # Merge the query parameters of the 'next' link (page,
        # per_page, ...) into the ones we already send.
        for key, value in urlparse.parse_qs(query).items():
            params[key] = value

        data = self.__perform_request(url, method, params).json()

        # Merge the dictionaries: list payloads are concatenated,
        # any other key (links, meta, ...) keeps the latest value.
        for key, value in data.items():
            if isinstance(value, list) and key in all_data:
                all_data[key] += value
            else:
                all_data[key] = value

    return all_data

def get_timeout(self):
"""
Checks if any timeout for the requests to DigitalOcean is required.
Expand All @@ -116,12 +142,18 @@ def get_timeout(self):
def get_data(self, url, type=GET, params=None):
"""
This method is a basic implementation of __call_api that checks
errors too. In cas of success the method will return True or the
errors too. In case of success the method will return True or the
content of the response to the request.
Pagination is automatically detected and handled accordingly
"""
if params is None:
params = dict()

# If per_page is not set, make sure it has a sane default
if type is GET:
params.setdefault("per_page", 200)

req = self.__perform_request(url, type, params)
if req.status_code == 204:
return True
Expand All @@ -140,7 +172,14 @@ def get_data(self, url, type=GET, params=None):
msg = [data[m] for m in ("id", "message") if m in data][1]
raise DataReadError(msg)

return data
# If there are more elements available than fit on a single page,
# deal with pagination transparently: the remaining pages are
# fetched and merged into this response before it is returned.
pages = data.get("links", {}).get("pages", {})
if pages.get("next") and "page" not in params:
return self.__deal_with_pagination(url, type, params, data)
else:
return data

def __str__(self):
return "<%s>" % self.__class__.__name__
Expand Down
12 changes: 12 additions & 0 deletions digitalocean/tests/BaseTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
import unittest


# Page size that get_data injects into every GET request when the caller
# does not specify ``per_page``; tests use this to build expected URLs.
DEFAULT_PER_PAGE = 200


class BaseTest(unittest.TestCase):

def setUp(self):
Expand Down Expand Up @@ -35,3 +38,12 @@ def assert_url_query_equal(self, url1, url2):

self.assertEqual(base1, base2)
self.assertEqual(qlist1, qlist2)

def assert_get_url_equal(self, url1, url2):
    """
    Compare a recorded GET request URL against an expected URL, taking
    into account the default ``per_page`` query parameter that is added
    to every GET request.
    """
    separator = "&" if "?" in url2 else "?"
    expected = "%s%sper_page=%d" % (url2, separator, DEFAULT_PER_PAGE)
    return self.assert_url_query_equal(url1, expected)
7 changes: 4 additions & 3 deletions digitalocean/tests/test_action.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,16 @@ def setUp(self):
def test_load_directly(self):
data = self.load_from_file('actions/ipv6_completed.json')

responses.add(responses.GET, self.base_url + "actions/39388122",
url = self.base_url + "actions/39388122"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')

self.action.load_directly()

self.assertEqual(responses.calls[0].request.url,
self.base_url + "actions/39388122")
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.action.status, "completed")
self.assertEqual(self.action.id, 39388122)
self.assertEqual(self.action.region_slug, 'nyc3')
Expand Down
2 changes: 1 addition & 1 deletion digitalocean/tests/test_certficate.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def test_load(self):

self.cert.load()

self.assertEqual(responses.calls[0].request.url, url)
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.cert.id, self.cert_id)
self.assertEqual(self.cert.name, 'web-cert-01')
self.assertEqual(self.cert.sha1_fingerprint,
Expand Down
31 changes: 18 additions & 13 deletions digitalocean/tests/test_domain.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import json
import unittest

import responses
import json
import digitalocean

import digitalocean
from .BaseTest import BaseTest


Expand All @@ -16,36 +17,39 @@ def setUp(self):
def test_load(self):
data = self.load_from_file('domains/single.json')

responses.add(responses.GET, self.base_url + "domains/example.com",
url = self.base_url + "domains/example.com"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')

domain = digitalocean.Domain(name='example.com', token=self.token)
domain.load()

self.assertEqual(responses.calls[0].request.url,
self.base_url + "domains/example.com")
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(domain.name, "example.com")
self.assertEqual(domain.ttl, 1800)

@responses.activate
def test_destroy(self):
responses.add(responses.DELETE, self.base_url + "domains/example.com",
url = self.base_url + "domains/example.com"
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')

self.domain.destroy()

self.assertEqual(responses.calls[0].request.url,
self.base_url + "domains/example.com")
self.assertEqual(responses.calls[0].request.url, url)

@responses.activate
def test_create_new_domain_record(self):
data = self.load_from_file('domains/create_record.json')

url = self.base_url + "domains/example.com/records"
responses.add(responses.POST,
self.base_url + "domains/example.com/records",
url,
body=data,
status=201,
content_type='application/json')
Expand All @@ -66,8 +70,9 @@ def test_create_new_domain_record(self):
def test_create(self):
data = self.load_from_file('domains/create.json')

url = self.base_url + "domains"
responses.add(responses.POST,
self.base_url + "domains",
url,
body=data,
status=201,
content_type='application/json')
Expand All @@ -87,16 +92,16 @@ def test_create(self):
def test_get_records(self):
data = self.load_from_file('domains/records.json')

url = self.base_url + "domains/example.com/records/"
responses.add(responses.GET,
self.base_url + "domains/example.com/records/",
url,
body=data,
status=200,
content_type='application/json')

records = self.domain.get_records()

self.assertEqual(responses.calls[0].request.url,
self.base_url + "domains/example.com/records/")
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(len(records), 5)
self.assertEqual(records[0].type, "A")
self.assertEqual(records[0].name, "@")
Expand Down

0 comments on commit c9b954d

Please sign in to comment.