-
Notifications
You must be signed in to change notification settings - Fork 3
Replace curl-download with paginated inline results + widget JSON endpoint #232
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
acc104d
a9f4eab
7679603
bee026c
74fc6b5
d233ebb
3057f31
5cfa25a
37af54b
bbad9a2
5c11232
3266f89
b444fe3
83c8f31
499ee35
a88b9d3
70f9096
d514eb9
961392a
c80abc5
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2,10 +2,12 @@ | |
|
|
||
| from __future__ import annotations | ||
|
|
||
| import io | ||
| import logging | ||
| import secrets | ||
| from uuid import UUID | ||
|
|
||
| import pandas as pd | ||
| from everyrow.api_utils import handle_response | ||
| from everyrow.generated.api.tasks import get_task_status_tasks_task_id_status_get | ||
| from everyrow.generated.client import AuthenticatedClient | ||
|
|
@@ -14,6 +16,7 @@ | |
|
|
||
| from everyrow_mcp import redis_store | ||
| from everyrow_mcp.config import settings | ||
| from everyrow_mcp.result_store import _sanitize_records | ||
| from everyrow_mcp.tool_helpers import _UI_EXCLUDE, TaskState | ||
|
|
||
| logger = logging.getLogger(__name__) | ||
|
|
@@ -243,7 +246,7 @@ async def api_download_token(request: Request) -> Response: | |
| return JSONResponse({"download_url": download_url}, headers=cors) | ||
|
|
||
|
|
||
| async def api_download(request: Request) -> Response: | ||
| async def api_download(request: Request) -> Response: # noqa: PLR0911 | ||
| """REST endpoint to download task results as CSV. | ||
|
|
||
| Authenticates via a short-lived, single-use download token (not the | ||
|
|
@@ -290,6 +293,38 @@ async def api_download(request: Request) -> Response: | |
| {"error": "Results not found or expired"}, status_code=404, headers=cors | ||
| ) | ||
|
|
||
| # Validate format parameter | ||
| fmt = request.query_params.get("format", "csv") | ||
| if fmt not in ("csv", "json"): | ||
| return JSONResponse( | ||
| {"error": "Unsupported format"}, status_code=400, headers=cors | ||
| ) | ||
RafaelPo marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| # Return JSON array if requested (used by the widget for full data fetch). | ||
| if fmt == "json": | ||
| # Guard against memory exhaustion — JSON conversion holds ~4x the data | ||
| # in memory (csv string, DataFrame, records list, JSON response body). | ||
| # Use a conservative 10 MB threshold to keep peak memory manageable. | ||
| max_json_size = settings.max_upload_size_bytes // 5 | ||
| if len(csv_text) > max_json_size: | ||
|
Comment on lines
+308
to
+309
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: The code compares the character count of `csv_text` (`len(csv_text)`) against `max_json_size`, which is a limit expressed in bytes (`settings.max_upload_size_bytes // 5`). For CSV content containing multi-byte UTF-8 characters, the character count understates the actual size, so oversized results can slip past the guard. Suggested FixThe check should compare the byte length of the string, not the character count. Encode the string to bytes before checking its length: `if len(csv_text.encode("utf-8")) > max_json_size:` Prompt for AI Agent |
||
| logger.warning( | ||
| "CSV too large for JSON conversion (%d chars, limit %d) for task %s", | ||
| len(csv_text), | ||
| max_json_size, | ||
| task_id, | ||
| ) | ||
| return JSONResponse( | ||
| {"error": "Result too large for JSON format"}, | ||
| status_code=413, | ||
| headers=cors, | ||
| ) | ||
| df = pd.read_csv(io.StringIO(csv_text)) | ||
| records = _sanitize_records(df.to_dict(orient="records")) | ||
| return JSONResponse( | ||
| records, | ||
| headers={**cors, "X-Content-Type-Options": "nosniff"}, | ||
| ) | ||
|
|
||
| safe_prefix = "".join(c for c in task_id[:8] if c.isalnum() or c == "-") | ||
| return Response( | ||
| content=csv_text, | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.