Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions src/pingintel_api/pingvision/pingvision_api_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import pathlib
import pprint
import time
from datetime import timedelta
from timeit import default_timer as timer
from typing import BinaryIO, Literal, TypedDict, overload, List
from typing import BinaryIO, TypedDict, Unpack, overload
Expand Down Expand Up @@ -294,6 +295,79 @@ def list_team_members(self, team_uuid: str) -> list:
response_data = response.json()
return response_data

def get_or_create_output_async_start(
    self,
    pingid: str,
    output_format: str,
    overwrite_existing: bool = False,
):
    """Kick off (or locate) output generation for a submission.

    :param pingid: Submission identifier.
    :param output_format: Requested output format (sent only when truthy).
    :param overwrite_existing: If True, ask the server to regenerate the output.
    :return: Decoded JSON response describing the output request.
    """
    url = self.api_url + f"/api/v1/submission/{pingid}/get_or_create_output"

    # Build the form payload, omitting falsy values entirely.
    payload = {}
    if output_format:
        payload["output_format"] = output_format
    if overwrite_existing:
        payload["overwrite_existing"] = overwrite_existing

    response = self.post(url, data=payload)
    raise_for_status(response)
    return response.json()

def get_or_create_output_async_check_progress(self, output_request_id: str):
    """Poll the status of a previously started get-or-create-output request.

    :param output_request_id: The ``request.id`` returned by
        ``get_or_create_output_async_start``.
    :return: Decoded JSON with the current ``request`` status (and a
        ``result`` entry once generation has finished).
    """
    url = self.api_url + f"/api/v1/submission/get_or_create_output/{output_request_id}"
    response = self.get(url)
    # Raise on HTTP errors rather than handing an error payload back to the
    # poll loop — consistent with get_or_create_output_async_start().
    raise_for_status(response)
    return response.json()

def get_or_create_output(
    self,
    pingid: str,
    output_format: str,
    overwrite_existing: bool = False,
    timeout: timedelta | None = timedelta(minutes=5),
) -> t.OutputData:
    """Synchronously get or create an output from a Ping Vision submission.

    If the output already exists, it is returned immediately. Otherwise the
    generation process is started and polled until it completes (or fails),
    then the result is returned.

    :param pingid: Submission identifier.
    :param output_format: Requested output format.
    :param overwrite_existing: Regenerate the output even if one already exists.
    :param timeout: Maximum time to wait for generation; ``None`` waits forever.
    :raises TimeoutError: If generation does not finish within ``timeout``.
    :raises ValueError: If the final server response contains no ``result``.
    """
    POLL_SECS = 2.5  # delay between progress checks; hoisted out of the loop

    start_response = self.get_or_create_output_async_start(
        pingid,
        output_format,
        overwrite_existing,
    )

    request_status = start_response["request"]["status"]
    output_request_id = start_response["request"]["id"]

    if request_status in ("COMPLETE", "FAILED"):
        # Terminal on the first call — no polling needed.
        response_data = start_response
    else:
        start_time = time.time()
        while True:
            if timeout and time.time() - start_time > timeout.total_seconds():
                raise TimeoutError(f"Timeout waiting for output generation: {output_request_id}")
            response_data = self.get_or_create_output_async_check_progress(output_request_id)
            request_status = response_data["request"]["status"]
            if request_status == "PENDING":
                self.logger.info(" - Has not yet been queued for processing.")
                time.sleep(POLL_SECS)
            elif request_status == "IN_PROGRESS":
                self.logger.info(f" - Still in progress: {request_status}")
                time.sleep(POLL_SECS)
            else:
                # COMPLETE, FAILED, or any other terminal status.
                break

    self.logger.info(f"+ Finished with result {response_data.get('result', {}).get('status')}")

    result = response_data.get("result", {})
    if not result:
        # NOTE(review): a FAILED request with no result payload lands here —
        # confirm whether a more specific error type is warranted.
        raise ValueError(f"Invalid response: {response_data}")
    return t.OutputData(
        label=result.get("label", None),
        scrubbed_filename=result.get("scrubbed_filename", None),
        output_format=result.get("output_format", None),
        url=result.get("url", None),
    )

def add_data_items(self, pingid: str, action: t.DATA_ITEM_ACTIONS, items: dict[str, str | int | float | bool]):
"""Docs: https://docs.pingintel.com/ping-vision/update-submission/store-additional-data-on-submission"""
url = self.api_url + f"/api/v1/submission/{pingid}/add_data_items"
Expand Down
7 changes: 7 additions & 0 deletions src/pingintel_api/pingvision/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,3 +222,10 @@ class PingVisionTeamsResponse(TypedDict):
class DATA_ITEM_ACTIONS(str, enum.Enum):
    # Action values accepted by the add_data_items API endpoint.
    # NOTE(review): presumably "upsert" merges the given items into the
    # submission's stored data while "replace" overwrites the stored set —
    # confirm against the add_data_items endpoint documentation.
    UPSERT = "upsert"
    REPLACE = "replace"


class OutputData(TypedDict):
label: str
scrubbed_filename: str
output_format: str
url: str
48 changes: 47 additions & 1 deletion src/pingintel_api/pingvisionapi_cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,8 +202,14 @@ def list_teams(ctx, pretty, include_statuses, filter_str, delegate_to_company, d
@click.option("-l", "--page-size", "--limit", default=50)
@click.option("--fields", multiple=True)
@click.option("--search", help="Filter key fields by an arbitrary string")
@click.option(
"--sort-order",
type=click.Choice(["asc", "desc"], case_sensitive=False),
default="desc",
show_default=True,
)
# @click.option("--organization__short_name")
def activity(ctx, pretty, id, cursor_id, prev_cursor_id, page_size, fields, search):
def activity(ctx, pretty, id, cursor_id, prev_cursor_id, page_size, fields, search, sort_order):
"""List submission activity."""
client = get_client(ctx)

Expand All @@ -214,6 +220,7 @@ def activity(ctx, pretty, id, cursor_id, prev_cursor_id, page_size, fields, sear
prev_cursor_id=prev_cursor_id,
fields=fields,
search=search,
sort_order=sort_order,
)
if pretty:
print(f"{'Activity ID':<36}{'Status':<30}{'Created':<20}")
Expand Down Expand Up @@ -256,6 +263,45 @@ def download_document(ctx, document_url, output):
print(f"Downloaded file to {output.name}")


@cli.command()
@click.pass_context
@click.argument("pingid")
@click.option(
    "-o",
    "--output-format",
    required=True,
    metavar="OUTPUT_FORMAT",
    help="Select an output format.",
)
@click.option(
    "--write/--no-write",
    is_flag=True,
    default=True,
    help="(default) Actually write the output. If disabled, download but do not persist the result to disk.",
)
@click.option(
    "--overwrite-existing/--no-overwrite-existing",
    is_flag=True,
    default=False,
    help="If set, regenerate the file even if it already exists.",
)
def get_output(ctx, pingid, output_format, write, overwrite_existing):
    """Fetch or generate an output from a previous submission."""
    client = get_client(ctx)
    output_data = client.get_or_create_output(
        pingid,
        output_format,
        overwrite_existing,
    )
    output_url = output_data["url"]
    # Prefer the server-provided filename; fall back to the last URL segment.
    filename = output_data.get("scrubbed_filename") or pathlib.Path(output_url).name
    if write:
        client.download_document(filename, document_url=output_url)
        # Report the actual file written instead of the literal "(unknown)"
        # placeholder left in the original.
        click.echo(f"Downloaded: {filename}")
    else:
        click.echo(output_url)


@cli.command()
@click.pass_context
@click.option("-d", "--division", type=str, help="Division UUID to filter by")
Expand Down