basic cycling through paging for aws api (#113)
* basic cycling through paging for aws api: keep fetching until nextToken is empty

* keep spacing for pep

* flake8 compliance

* now black is complaining

Co-authored-by: Greg Desmarais <gdesmarais@celsiustx.com>
gregdes and gdesmarais-ctx committed Jul 17, 2020
1 parent 11a4b2c commit 3774956
Showing 1 changed file with 10 additions and 6 deletions.
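
The change is the standard AWS list-API pagination idiom: each describe_log_groups response may carry a nextToken, and the caller has to repeat the call with that token until the response no longer includes one, otherwise only the first page of results (at most 50 log groups per call) is seen. Below is a minimal, self-contained sketch of the same loop, assuming an aiobotocore session is used directly rather than the cluster's internal self._client helper; the function name list_all_log_groups is illustrative only.

import asyncio

from aiobotocore.session import get_session


async def list_all_log_groups():
    # Follow nextToken until the service stops returning one, collecting
    # every page of describe_log_groups results along the way.
    session = get_session()
    async with session.create_client("logs") as logs:
        response = await logs.describe_log_groups()
        groups = response["logGroups"]
        while response.get("nextToken"):
            response = await logs.describe_log_groups(
                nextToken=response["nextToken"]
            )
            groups.extend(response["logGroups"])
    return groups


if __name__ == "__main__":
    print([g["logGroupName"] for g in asyncio.run(list_all_log_groups())])
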
16 changes: 10 additions & 6 deletions dask_cloudprovider/providers/aws/ecs.py
@@ -1,7 +1,5 @@
 import asyncio
 import logging
-import sys
-import time
 import uuid
 import warnings
 import weakref
@@ -561,10 +559,11 @@ class ECSCluster(SpecCluster):
         Defaults to ``None``
     fargate_use_private_ip: bool (optional)
         Whether to use a private IP (if True) or public IP (if False) with Fargate.
         Default ``False``.
     mount_points: list (optional)
-        List of mount points as documented here: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/efs-volumes.html
+        List of mount points as documented here:
+        https://docs.aws.amazon.com/AmazonECS/latest/developerguide/efs-volumes.html
         Default ``None``.
     volumes: list (optional)
@@ -972,9 +971,14 @@ async def _delete_role(self, role):
     async def _create_cloudwatch_logs_group(self):
         log_group_name = "dask-ecs"
         async with self._client("logs") as logs:
+            groups = await logs.describe_log_groups()
+            log_group_defs = groups["logGroups"]
+            while groups.get("nextToken"):
+                groups = await logs.describe_log_groups(nextToken=groups["nextToken"])
+                log_group_defs.extend(groups["logGroups"])
+
             if log_group_name not in [
-                group["logGroupName"]
-                for group in (await logs.describe_log_groups())["logGroups"]
+                group["logGroupName"] for group in log_group_defs
             ]:
                 await logs.create_log_group(logGroupName=log_group_name, tags=self.tags)
                 await logs.put_retention_policy(
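
The manual loop added here is essentially what botocore's built-in paginators do internally. As a hedged alternative sketch, not what this commit uses: if the aiobotocore client returned by self._client("logs") exposes paginators (recent aiobotocore releases do), the existence check could consume an async page iterator instead of tracking nextToken by hand. The helper name _all_log_group_names below is hypothetical.

async def _all_log_group_names(self):
    # Hypothetical helper: let the (aio)botocore paginator follow
    # nextToken internally instead of looping over it manually.
    names = []
    async with self._client("logs") as logs:
        paginator = logs.get_paginator("describe_log_groups")
        async for page in paginator.paginate():
            names.extend(group["logGroupName"] for group in page["logGroups"])
    return names

Either way, the full listing is only needed to check whether the "dask-ecs" group already exists; describe_log_groups also accepts a logGroupNamePrefix filter, which could narrow the scan to matching names.
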
