Pagination query params according to API versions
fao89 committed Sep 1, 2020
1 parent b3695fa commit 94b15f1
Showing 4 changed files with 37 additions and 13 deletions.
3 changes: 3 additions & 0 deletions CHANGES/7396.bugfix
@@ -0,0 +1,3 @@
+Pagination query params according to API versions.
+v1 and v2 - `page` and `page_size`
+v3 or above - `offset` and `limit`
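
As a rough illustration of the mapping above, the sketch below (not pulp_ansible code; `PAGE_SIZE = 10` is only an example value, the real constant lives in `pulp_ansible.app.constants`) shows how the same logical page number turns into query parameters for each API generation:

from urllib.parse import urlencode

PAGE_SIZE = 10  # example value only; pulp_ansible defines its own constant


def pagination_params(api_version, page):
    """Return pagination query params for a 1-based page, per the changelog above."""
    if api_version < 3:
        return {"page": page, "page_size": PAGE_SIZE}
    # v3 and above switch to limit/offset, so page 1 starts at offset 0
    return {"offset": (page - 1) * PAGE_SIZE, "limit": PAGE_SIZE}


print(urlencode(pagination_params(2, 3)))  # page=3&page_size=10
print(urlencode(pagination_params(3, 3)))  # offset=20&limit=10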
19 changes: 12 additions & 7 deletions pulp_ansible/app/tasks/collections.py
@@ -47,6 +47,7 @@
 )
 from pulp_ansible.app.serializers import CollectionVersionSerializer
 from pulp_ansible.app.tasks.utils import (
+    get_api_version,
     get_page_url,
     parse_metadata,
     parse_collections_requirements_file,
@@ -318,15 +319,15 @@ async def _fetch_collections(self):
         remote = self.remote
         collection_info = self.collection_info

-        def _get_url(page):
+        def _get_url(page, api_version):
             if collection_info:
                 name, version, source = collection_info[page - 1]
                 namespace, name = name.split(".")
                 root = source or remote.url
                 url = f"{root}/api/v2/collections/{namespace}/{name}"
                 return url

-            return get_page_url(remote.url, page)
+            return get_page_url(remote.url, api_version, page)

         def _build_url(path_or_url):
             """Check value and turn it into a url using remote.url if it's a relative path."""
@@ -339,19 +340,21 @@ def _build_url(path_or_url):

         progress_data = dict(message="Parsing Galaxy Collections API", code="parsing.collections")
         with ProgressReport(**progress_data) as progress_bar:
-            url = _get_url(page_count)
+            api_version = get_api_version(remote.url)
+            url = _get_url(page_count, api_version)
             downloader = remote.get_downloader(url=url)
             initial_data = parse_metadata(await downloader.run())

-            count = len(self.collection_info) or initial_data.get("count", 1)
+            _count = initial_data.get("count") or initial_data.get("meta", {}).get("count", 1)
+            count = len(self.collection_info) or _count
             page_count = math.ceil(float(count) / float(PAGE_SIZE))
             progress_bar.total = count
             progress_bar.save()

             # Concurrent downloads are limited by aiohttp...
             not_done = set()
             for page in range(1, page_count + 1):
-                downloader = remote.get_downloader(url=_get_url(page))
+                downloader = remote.get_downloader(url=_get_url(page, api_version))
                 not_done.add(downloader.run())

             while not_done:
@@ -360,8 +363,10 @@ def _build_url(path_or_url):
                 for item in done:
                     data = parse_metadata(item.result())

-                    # v2 uses 'results' as the key while v3 uses 'data'
-                    results = data.get("results") or data.get("data") or [data]
+                    if api_version < 3:
+                        results = data.get("results", [data])
+                    else:
+                        results = data.get("data", [data])

                     for result in results:
                         download_url = result.get("download_url")
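
For context on the `results`/`data` branch introduced above: Galaxy's v2 endpoints return a top-level `count` and a `results` list, while v3 nests the count under `meta` and returns items under `data`, which is also why the count lookup now falls back to `meta.count`. The payloads below are trimmed, illustrative sketches rather than verbatim Galaxy responses:

# Illustrative (not verbatim) response shapes; only the keys read by the diff above matter.
v2_page = {
    "count": 1,
    "results": [{"download_url": "https://galaxy.example.com/download/demo-collection-1.0.0.tar.gz"}],
}
v3_page = {
    "meta": {"count": 1},
    "data": [{"download_url": "https://galaxy.example.com/download/demo-collection-1.0.0.tar.gz"}],
}


def extract_results(data, api_version):
    # Mirrors the key selection added to _fetch_collections above.
    if api_version < 3:
        return data.get("results", [data])
    return data.get("data", [data])


assert extract_results(v2_page, 2) == v2_page["results"]
assert extract_results(v3_page, 3) == v3_page["data"]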
7 changes: 4 additions & 3 deletions pulp_ansible/app/tasks/roles.py
@@ -14,7 +14,7 @@
 )
 from pulp_ansible.app.constants import PAGE_SIZE
 from pulp_ansible.app.models import RoleRemote, Role
-from pulp_ansible.app.tasks.utils import get_page_url, parse_metadata
+from pulp_ansible.app.tasks.utils import get_api_version, get_page_url, parse_metadata


 log = logging.getLogger(__name__)
@@ -130,7 +130,8 @@ async def _fetch_galaxy_pages(self):

         progress_data = dict(message="Parsing Pages from Galaxy Roles API", code="parsing.roles")
         with ProgressReport(**progress_data) as progress_bar:
-            downloader = remote.get_downloader(url=get_page_url(remote.url))
+            api_version = get_api_version(remote.url)
+            downloader = remote.get_downloader(url=get_page_url(remote.url, api_version))
             metadata = parse_metadata(await downloader.run())

             page_count = math.ceil(float(metadata["count"]) / float(PAGE_SIZE))
@@ -142,7 +143,7 @@

             # Concurrent downloads are limited by aiohttp...
             not_done = set(
-                remote.get_downloader(url=get_page_url(remote.url, page)).run()
+                remote.get_downloader(url=get_page_url(remote.url, api_version, page)).run()
                 for page in range(2, page_count + 1)
             )

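The page_count arithmetic used in both sync stages, math.ceil(count / PAGE_SIZE), determines how many additional pages to schedule after the first response. A small worked example, again with PAGE_SIZE = 10 as an assumed value:

import math

PAGE_SIZE = 10  # assumed example value

count = 95  # total items reported by the first API response
page_count = math.ceil(float(count) / float(PAGE_SIZE))
assert page_count == 10

# Page 1 was already fetched to learn `count`, so only pages 2..page_count remain.
remaining_pages = list(range(2, page_count + 1))
assert remaining_pages == [2, 3, 4, 5, 6, 7, 8, 9, 10]
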
21 changes: 18 additions & 3 deletions pulp_ansible/app/tasks/utils.py
@@ -1,5 +1,6 @@
 from gettext import gettext as _
 import json
+import re
 import yaml

 from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
@@ -9,12 +10,26 @@
 from pulp_ansible.app.constants import PAGE_SIZE


-def get_page_url(url, page=1):
+def get_api_version(url):
+    """Get API version."""
+    result = re.findall(r"/api/v(\d)", url)
+    if len(result) != 1:
+        raise RuntimeError("Could not determine Galaxy API version")
+    return int(result[0])
+
+
+def get_page_url(url, api_version, page=1):
     """Get URL page."""
     parsed_url = urlparse(url)
     new_query = parse_qs(parsed_url.query)
-    new_query["page"] = page
-    new_query["page_size"] = PAGE_SIZE
+
+    if api_version < 3:
+        new_query["page"] = page
+        new_query["page_size"] = PAGE_SIZE
+    else:
+        new_query["offset"] = (page - 1) * PAGE_SIZE
+        new_query["limit"] = PAGE_SIZE
+
     return urlunparse(parsed_url._replace(query=urlencode(new_query, doseq=True)))


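A possible usage sketch for the two helpers defined above, assuming an environment where pulp_ansible is importable; the host name is an example and the printed query strings depend on the configured PAGE_SIZE:

from pulp_ansible.app.tasks.utils import get_api_version, get_page_url

v2_url = "https://galaxy.example.com/api/v2/collections/"
v3_url = "https://galaxy.example.com/api/v3/collections/"

# The version is parsed from the /api/v<N>/ segment of the remote URL.
assert get_api_version(v2_url) == 2
assert get_api_version(v3_url) == 3

# Page 2 keeps page/page_size on v2 but becomes offset/limit on v3.
print(get_page_url(v2_url, get_api_version(v2_url), page=2))
print(get_page_url(v3_url, get_api_version(v3_url), page=2))

# A URL without an /api/v<N>/ segment cannot be classified and raises RuntimeError.
try:
    get_api_version("https://galaxy.example.com/content/")
except RuntimeError as exc:
    print(exc)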
