Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
226a1d0
Saving work, first pass seems to work alright
michael-richey Aug 8, 2025
c8a04c8
Remove idx from output
michael-richey Aug 11, 2025
70b3dbe
Debug messaging
michael-richey Aug 11, 2025
f87c598
Typo
michael-richey Aug 11, 2025
3ce8383
Typo
michael-richey Aug 11, 2025
e0d3088
Add another debug log
michael-richey Aug 11, 2025
eb68c07
Add another debug log
michael-richey Aug 11, 2025
aabf5df
Add another debug log
michael-richey Aug 11, 2025
ea12662
Add more debug logs
michael-richey Aug 11, 2025
8a8bd33
Try altering the starting page number
michael-richey Aug 11, 2025
c4d1ff8
Even more debugging
michael-richey Aug 11, 2025
e0c57c4
Remove extra f strings
michael-richey Aug 11, 2025
0ff79e7
Revert change to start at 1
michael-richey Aug 11, 2025
49c6ad0
Don't reuse pagination config since it gets altered when using it
michael-richey Aug 11, 2025
b0016e9
Don't reuse pagination config since it gets altered when using it
michael-richey Aug 11, 2025
b653a9e
Fix the page number function
michael-richey Aug 11, 2025
23fc663
Correct way to deal with multiple paginated requests
michael-richey Aug 12, 2025
66bcfe2
Clean up debug statements, add comments
michael-richey Aug 12, 2025
e56c0cf
Fixed!!!
michael-richey Aug 12, 2025
f3fd771
Black
michael-richey Aug 12, 2025
6f20900
Fix remaining calcs and reset it before recalculation
michael-richey Aug 12, 2025
3b43ff7
Black
michael-richey Aug 13, 2025
c7a1e97
SDS scans commits, making it hard to commit cassettes for SDS
michael-richey Aug 13, 2025
1445f87
Cassettes
michael-richey Aug 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
34 changes: 30 additions & 4 deletions datadog_sync/model/team_memberships.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,21 +32,38 @@ class TeamMemberships(BaseResource):
team_memberships_path = "/api/v2/team/{}/memberships"
destination_team_memberships: List[Dict] = []
# Additional TeamMemberships specific attributes
pagination_config = PaginationConfig(remaining_func=lambda *args: 1)

async def get_resources(self, client: CustomClient) -> List[Dict]:
# get all the teams
teams_pagination_config = PaginationConfig(
page_size=100,
page_number_param="page[number]",
page_size_param="page[size]",
remaining_func=lambda idx, resp, page_size, page_number: max(
0,
resp["meta"]["pagination"]["total"] - (page_size * (idx + 1)),
),
)
teams = await client.paginated_request(client.get)(
self.resource_config.base_path,
pagination_config=self.pagination_config,
pagination_config=teams_pagination_config,
)

# iterate over the teams and create a list of all members of all teams
all_team_memberships = []
for team in teams:
members_pagination_config = PaginationConfig(
page_size=100,
page_number_param="page[number]",
page_size_param="page[size]",
remaining_func=lambda idx, resp, page_size, page_number: max(
0,
resp["meta"]["pagination"]["total"] - (page_size * (idx + 1)),
),
)
members_of_team = await client.paginated_request(client.get)(
self.team_memberships_path.format(team["id"]),
pagination_config=self.pagination_config,
pagination_config=members_pagination_config,
)

# add the team relationship
Expand All @@ -58,11 +75,20 @@ async def get_resources(self, client: CustomClient) -> List[Dict]:

async def import_resource(self, _id: Optional[str] = None, resource: Optional[Dict] = None) -> Tuple[str, Dict]:
source_client = self.config.source_client
pagination_config = PaginationConfig(
page_size=100,
page_number_param="page[number]",
page_size_param="page[size]",
remaining_func=lambda idx, resp, page_size, page_number: max(
0,
resp["meta"]["pagination"]["total"] - (page_size * (idx + 1)),
),
)

if _id:
resource = await source_client.paginated_request(source_client.get)(
self.team_memberships_path.format(_id),
pagination_config=self.pagination_config,
pagination_config=pagination_config,
)

resource = cast(dict, resource)
Expand Down
90 changes: 76 additions & 14 deletions datadog_sync/utils/custom_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,13 @@ async def wrapper(*args, **kwargs):
resources = []
kwargs["params"] = kwargs.get("params", {}) or {}
idx = 0
original_page_size = page_size
restore_page_size = False
resources_attempted = 0
saved_idx = idx
save_idx = True
while remaining > 0:
remaining = 0
log.debug(
f"fetching {args[0]} "
f"{pagination_config.page_number_param}: {page_number} "
Expand All @@ -142,22 +148,78 @@ async def wrapper(*args, **kwargs):
}
kwargs["params"].update(params)

resp = await func(*args, **kwargs)

resp_len = 0
if pagination_config.response_list_accessor:
resources.extend(resp[pagination_config.response_list_accessor])
resp_len = len(resp[pagination_config.response_list_accessor])
else:
resources.extend(resp)
resp_len = len(resp)

if resp_len < page_size:
break

remaining = pagination_config.remaining_func(idx, resp, page_size, page_number)
try:
# call the actual awaitable function
resp = await func(*args, **kwargs)
resp_len = 0

# add resources from the page to our list
if pagination_config.response_list_accessor:
resources.extend(resp[pagination_config.response_list_accessor])
resp_len = len(resp[pagination_config.response_list_accessor])
else:
resources.extend(resp)
resp_len = len(resp)

# if it's a partial page then we're done, it's the last page
if resp_len < page_size:
break

# restore the page size if we had to lower it to deal with a bad resource response
resources_attempted += resp_len
if restore_page_size:
if resources_attempted % original_page_size == 0:
page_size = original_page_size
page_number = pagination_config.page_number_func(idx, page_size, page_number)
restore_page_size = False
idx = saved_idx
save_idx = True

remaining = pagination_config.remaining_func(idx, resp, page_size, page_number)
except CustomClientHTTPError as err:
if err.status_code >= 500:
log.warning("500 error during a paginated request, attempting to isolate")

# save the index so we can come back to it after dealing with this batch
if save_idx:
saved_idx = idx
save_idx = False

# we're in the except and our page size is 1, we've found the bad resources
error_handled = False
if page_size == 1:
log.warning("Error isolated, skipping resource:")
log.warning(
f"Fetching {args[0]} "
f"{pagination_config.page_number_param}: {page_number} "
f"{pagination_config.page_size_param}: {page_size} "
)
resources_attempted += 1
error_handled = True

if error_handled:
restore_page_size = True
else:
# reduce the page size by 50% to isolate the bad resource
new_page_size = page_size // 2
# page size can't be 0
if new_page_size == 0:
new_page_size = 1
# to start on the right page number the resources we've attempted so far
# need to be evenly divisible by the page_size
while resources_attempted % new_page_size != 0:
new_page_size -= 1

# set the page_size, idx, and page_number in that order
page_size = new_page_size
idx = resources_attempted // page_size - 1
page_number = pagination_config.page_number_func(idx, page_size, page_number)

# made it through the try/except, now increase the page number and idx
page_number = pagination_config.page_number_func(idx, page_size, page_number)
idx += 1

# return our list of good resources
return resources

return wrapper
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2025-07-18T17:44:43.820248-04:00
2025-08-13T09:51:22.867088-04:00
Loading
Loading