19 changes: 16 additions & 3 deletions src/taskgraph/util/taskcluster.py
@@ -81,14 +81,19 @@ def requests_retry_session(
status_forcelist=(500, 502, 503, 504),
concurrency=CONCURRENCY,
session=None,
allowed_methods=None,
):
session = session or requests.Session()
kwargs = {}
if allowed_methods is not None:
kwargs["allowed_methods"] = allowed_methods
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
**kwargs,
)

# Default HTTPAdapter uses 10 connections. Mount custom adapter to increase
@@ -110,11 +115,17 @@ def get_session():
return requests_retry_session(retries=5)


def _do_request(url, method=None, **kwargs):
@functools.lru_cache(maxsize=None)
def get_retry_post_session():
allowed_methods = set(("POST",)) | Retry.DEFAULT_ALLOWED_METHODS
return requests_retry_session(retries=5, allowed_methods=allowed_methods)


def _do_request(url, method=None, session=None, **kwargs):
if method is None:
method = "post" if kwargs else "get"

session = get_session()
if session is None:
session = get_session()
if method == "get":
kwargs["stream"] = True

@@ -213,6 +224,7 @@ def find_task_id_batched(index_paths, use_proxy=False):
while True:
response = _do_request(
endpoint,
session=get_retry_post_session(),
json={
"indexes": index_paths,
},
@@ -335,6 +347,7 @@ def status_task_batched(task_ids, use_proxy=False):
while True:
response = _do_request(
endpoint,
session=get_retry_post_session(),
json={
"taskIds": task_ids,
},
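
For readers skimming the diff, here is a minimal standalone sketch of the pattern this change introduces: a requests Session mounted with a urllib3 Retry whose allowed_methods set includes POST, so batched lookups like find_task_id_batched and status_task_batched can be retried on transient 5xx responses. The helper name, backoff factor, and example URL below are illustrative assumptions, not code from this repository (the real helpers are requests_retry_session() and get_retry_post_session()); it assumes urllib3 >= 1.26, where Retry exposes allowed_methods and DEFAULT_ALLOWED_METHODS.

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def retry_post_session(retries=5, backoff_factor=0.1):
    # POST is not idempotent, so urllib3 leaves it out of the default
    # allowed_methods; the batched endpoints are safe to replay, so add it back.
    allowed_methods = {"POST"} | set(Retry.DEFAULT_ALLOWED_METHODS)
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=(500, 502, 503, 504),
        allowed_methods=allowed_methods,
    )
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session


# Usage sketch: a POST that hits a retryable status is re-sent with backoff.
session = retry_post_session()
response = session.post("https://example.com/tasks/status", json={"taskIds": []})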